Sep 30 13:34:58 crc systemd[1]: Starting Kubernetes Kubelet...
Sep 30 13:34:58 crc restorecon[4721]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
system_u:object_r:container_file_t:s0:c0,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 13:34:58 crc restorecon[4721]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:58 crc restorecon[4721]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 13:34:58 crc 
restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 13:34:58 crc restorecon[4721]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 13:34:58 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 13:34:59 crc 
restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc 
restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc 
restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 13:34:59 
crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 
13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 13:34:59 crc restorecon[4721]: 
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 
13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 13:34:59 crc 
restorecon[4721]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 
13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 13:34:59 crc restorecon[4721]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 13:34:59 crc restorecon[4721]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Sep 30 13:35:00 crc kubenswrapper[4783]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 30 13:35:00 crc kubenswrapper[4783]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Sep 30 13:35:00 crc kubenswrapper[4783]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 30 13:35:00 crc kubenswrapper[4783]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Sep 30 13:35:00 crc kubenswrapper[4783]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Sep 30 13:35:00 crc kubenswrapper[4783]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.596682    4783 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601366    4783 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601392    4783 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601400    4783 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601408    4783 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601415    4783 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601420    4783 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601426    4783 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601433    4783 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601440    4783 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601446    4783 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601451    4783 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601456    4783 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601464    4783 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601469    4783 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601475    4783 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601482    4783 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601488 4783 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601495 4783 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601501 4783 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601506 4783 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601512 4783 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601518 4783 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601523 4783 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601528 4783 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601533 4783 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601538 4783 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601544 4783 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601549 4783 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601554 4783 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601559 4783 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601564 4783 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601569 4783 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601574 4783 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601579 4783 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601584 4783 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601589 4783 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601594 4783 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601599 4783 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601604 4783 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601610 4783 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601617 4783 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601623 4783 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601629 4783 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601634 4783 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601642 4783 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601649 4783 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601654 4783 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601660 4783 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601666 4783 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601672 4783 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601677 4783 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601682 4783 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601687 4783 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601692 4783 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601697 4783 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601702 4783 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601707 4783 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601712 4783 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601717 4783 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601724 4783 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601731 4783 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601736 4783 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601741 4783 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601747 4783 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601752 4783 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601757 4783 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601765 4783 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601770 4783 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601776 4783 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601782 4783 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.601788 4783 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602703 4783 flags.go:64] FLAG: --address="0.0.0.0"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602723 4783 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602737 4783 flags.go:64] FLAG: --anonymous-auth="true"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602745 4783 flags.go:64] FLAG: --application-metrics-count-limit="100"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602758 4783 flags.go:64] FLAG: --authentication-token-webhook="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602764 4783 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602773 4783 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602781 4783 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602787 4783 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602793 4783 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602800 4783 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602806 4783 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602812 4783 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602818 4783 flags.go:64] FLAG: --cgroup-root=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602824 4783 flags.go:64] FLAG: --cgroups-per-qos="true"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602830 4783 flags.go:64] FLAG: --client-ca-file=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602836 4783 flags.go:64] FLAG: --cloud-config=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602841 4783 flags.go:64] FLAG: --cloud-provider=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602847 4783 flags.go:64] FLAG: --cluster-dns="[]"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602854 4783 flags.go:64] FLAG: --cluster-domain=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602860 4783 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602866 4783 flags.go:64] FLAG: --config-dir=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602872 4783 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602894 4783 flags.go:64] FLAG: --container-log-max-files="5"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602902 4783 flags.go:64] FLAG: --container-log-max-size="10Mi"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602909 4783 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602916 4783 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602922 4783 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602928 4783 flags.go:64] FLAG: --contention-profiling="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602934 4783 flags.go:64] FLAG: --cpu-cfs-quota="true"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602939 4783 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602946 4783 flags.go:64] FLAG: --cpu-manager-policy="none"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602952 4783 flags.go:64] FLAG: --cpu-manager-policy-options=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602959 4783 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602965 4783 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602971 4783 flags.go:64] FLAG: --enable-debugging-handlers="true"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602977 4783 flags.go:64] FLAG: --enable-load-reader="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602983 4783 flags.go:64] FLAG: --enable-server="true"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602989 4783 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.602996 4783 flags.go:64] FLAG: --event-burst="100"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603005 4783 flags.go:64] FLAG: --event-qps="50"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603011 4783 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603017 4783 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603024 4783 flags.go:64] FLAG: --eviction-hard=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603033 4783 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603038 4783 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603045 4783 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603052 4783 flags.go:64] FLAG: --eviction-soft=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603058 4783 flags.go:64] FLAG: --eviction-soft-grace-period=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603064 4783 flags.go:64] FLAG: --exit-on-lock-contention="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603070 4783 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603076 4783 flags.go:64] FLAG: --experimental-mounter-path=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603082 4783 flags.go:64] FLAG: --fail-cgroupv1="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603088 4783 flags.go:64] FLAG: --fail-swap-on="true"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603093 4783 flags.go:64] FLAG: --feature-gates=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603100 4783 flags.go:64] FLAG: --file-check-frequency="20s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603107 4783 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603113 4783 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603119 4783 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603125 4783 flags.go:64] FLAG: --healthz-port="10248"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603132 4783 flags.go:64] FLAG: --help="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603138 4783 flags.go:64] FLAG: --hostname-override=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603143 4783 flags.go:64] FLAG: --housekeeping-interval="10s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603150 4783 flags.go:64] FLAG: --http-check-frequency="20s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603156 4783 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603161 4783 flags.go:64] FLAG: --image-credential-provider-config=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603167 4783 flags.go:64] FLAG: --image-gc-high-threshold="85"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603173 4783 flags.go:64] FLAG: --image-gc-low-threshold="80"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603179 4783 flags.go:64] FLAG: --image-service-endpoint=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603184 4783 flags.go:64] FLAG: --kernel-memcg-notification="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603190 4783 flags.go:64] FLAG: --kube-api-burst="100"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603196 4783 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603202 4783 flags.go:64] FLAG: --kube-api-qps="50"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603209 4783 flags.go:64] FLAG: --kube-reserved=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603215 4783 flags.go:64] FLAG: --kube-reserved-cgroup=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603239 4783 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603247 4783 flags.go:64] FLAG: --kubelet-cgroups=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603252 4783 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603259 4783 flags.go:64] FLAG: --lock-file=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603264 4783 flags.go:64] FLAG: --log-cadvisor-usage="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603270 4783 flags.go:64] FLAG: --log-flush-frequency="5s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603276 4783 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603285 4783 flags.go:64] FLAG: --log-json-split-stream="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603292 4783 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603298 4783 flags.go:64] FLAG: --log-text-split-stream="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603304 4783 flags.go:64] FLAG: --logging-format="text"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603310 4783 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603317 4783 flags.go:64] FLAG: --make-iptables-util-chains="true"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603334 4783 flags.go:64] FLAG: --manifest-url=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603341 4783 flags.go:64] FLAG: --manifest-url-header=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603348 4783 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603355 4783 flags.go:64] FLAG: --max-open-files="1000000"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603362 4783 flags.go:64] FLAG: --max-pods="110"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603368 4783 flags.go:64] FLAG: --maximum-dead-containers="-1"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603374 4783 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603380 4783 flags.go:64] FLAG: --memory-manager-policy="None"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603386 4783 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603392 4783 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603398 4783 flags.go:64] FLAG: --node-ip="192.168.126.11"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603405 4783 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603418 4783 flags.go:64] FLAG: --node-status-max-images="50"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603425 4783 flags.go:64] FLAG: --node-status-update-frequency="10s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603431 4783 flags.go:64] FLAG: --oom-score-adj="-999"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603437 4783 flags.go:64] FLAG: --pod-cidr=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603443 4783 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603452 4783 flags.go:64] FLAG: --pod-manifest-path=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603458 4783 flags.go:64] FLAG: --pod-max-pids="-1"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603464 4783 flags.go:64] FLAG: --pods-per-core="0"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603470 4783 flags.go:64] FLAG: --port="10250"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603477 4783 flags.go:64] FLAG: --protect-kernel-defaults="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603482 4783 flags.go:64] FLAG: --provider-id=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603488 4783 flags.go:64] FLAG: --qos-reserved=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603495 4783 flags.go:64] FLAG: --read-only-port="10255"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603501 4783 flags.go:64] FLAG: --register-node="true"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603507 4783 flags.go:64] FLAG: --register-schedulable="true"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603513 4783 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603523 4783 flags.go:64] FLAG: --registry-burst="10"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603529 4783 flags.go:64] FLAG: --registry-qps="5"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603535 4783 flags.go:64] FLAG: --reserved-cpus=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603541 4783 flags.go:64] FLAG: --reserved-memory=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603549 4783 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603555 4783 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603561 4783 flags.go:64] FLAG: --rotate-certificates="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603567 4783 flags.go:64] FLAG: --rotate-server-certificates="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603573 4783 flags.go:64] FLAG: --runonce="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603579 4783 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603585 4783 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603592 4783 flags.go:64] FLAG: --seccomp-default="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603597 4783 flags.go:64] FLAG: --serialize-image-pulls="true"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603603 4783 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603610 4783 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603616 4783 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603622 4783 flags.go:64] FLAG: --storage-driver-password="root"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603632 4783 flags.go:64] FLAG: --storage-driver-secure="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603638 4783 flags.go:64] FLAG: --storage-driver-table="stats"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603644 4783 flags.go:64] FLAG: --storage-driver-user="root"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603650 4783 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603656 4783 flags.go:64] FLAG: --sync-frequency="1m0s"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603662 4783 flags.go:64] FLAG: --system-cgroups=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603668 4783 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603678 4783 flags.go:64] FLAG: --system-reserved-cgroup=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603683 4783 flags.go:64] FLAG: --tls-cert-file=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603689 4783 flags.go:64] FLAG: --tls-cipher-suites="[]"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603696 4783 flags.go:64] FLAG: --tls-min-version=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603702 4783 flags.go:64] FLAG: --tls-private-key-file=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603708 4783 flags.go:64] FLAG: --topology-manager-policy="none"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603714 4783 flags.go:64] FLAG: --topology-manager-policy-options=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603720 4783 flags.go:64] FLAG: --topology-manager-scope="container"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603726 4783 flags.go:64] FLAG: --v="2"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603735 4783 flags.go:64] FLAG: --version="false"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603743 4783 flags.go:64] FLAG: --vmodule=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603751 4783 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.603770 4783 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603906 4783 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603913 4783 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603921 4783 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603928 4783 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603934 4783 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603940 4783 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603946 4783 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603951 4783 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603957 4783 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603962 4783 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603967 4783 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603973 4783 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603981 4783 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603986 4783 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603991 4783 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.603996 4783 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604006 4783 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604011 4783 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604016 4783 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604022 4783 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604027 4783 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604032 4783 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604037 4783 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604042 4783 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604047 4783 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604052 4783 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604057 4783 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604062 4783 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604067 4783 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604074 4783 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604081 4783 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604089 4783 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604094 4783 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604100 4783 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604107 4783 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604113 4783 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604118 4783 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604124 4783 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604129 4783 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604134 4783 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604139 4783 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604144 4783 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604150 4783 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604155 4783 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604162 4783 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604167 4783 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604172 4783 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604177 4783 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604184 4783 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604189 4783 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604194 4783 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604199 4783 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604205 4783 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604210 4783 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604215 4783 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604236 4783 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604241 4783 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604257 4783 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604262 4783 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604270 4783 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604277 4783 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604282 4783 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604287 4783 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604293 4783 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604298 4783 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604303 4783 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604308 4783 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604318 4783 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604323 4783 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604328 4783 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.604334 4783 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.604342 4783 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.613992 4783 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.614067 4783 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614186 4783 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614206 4783 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614215 4783 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614250 4783 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614258 4783 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614270 4783 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614280 4783 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614289 4783 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614297 4783 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614306 4783 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614314 4783 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614321 4783 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614329 4783 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614337 4783 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614345 4783 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614352 4783 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614363 4783 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614375 4783 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614384 4783 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614393 4783 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614401 4783 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614409 4783 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614418 4783 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614426 4783 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614434 4783 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614442 4783 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614450 4783 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614457 4783 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614465 4783 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614473 4783 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614480 4783 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614491 4783 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614501 4783 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614510 4783 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614520 4783 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614529 4783 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614537 4783 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614545 4783 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614554 4783 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614563 4783 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614572 4783 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614580 4783 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614589 4783 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614597 4783 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614606 4783 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614614 4783 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614622 4783 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614630 4783 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614637 4783 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614645 4783 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614653 4783 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614661 4783 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614668 4783 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614676 4783 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614685 4783 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614693 4783 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614701 4783 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614709 4783 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614718 4783 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614726 4783 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614734 4783 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614741 4783 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614749 4783 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614757 4783 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614764 4783 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614772 4783 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614780 4783 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614790 4783 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614800 4783 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614810 4783 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.614819 4783 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.614832 4783 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615048 4783 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615062 4783 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615074 4783 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615084 4783 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615093 4783 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615101 4783 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615109 4783 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615117 4783 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615125 4783 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615132 4783 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615140 4783 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615148 4783 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615155 4783 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615166 4783 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615178 4783 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615187 4783 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615195 4783 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615204 4783 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615213 4783 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615244 4783 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615253 4783 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615261 4783 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615270 4783 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615278 4783 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615286 4783 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615294 4783 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615302 4783 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615309 4783 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615317 4783 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615326 4783 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615336 4783 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615346 4783 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615354 4783 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615363 4783 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615374 4783 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615382 4783 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615390 4783 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615397 4783 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615405 4783 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615413 4783 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615421 4783 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615429 4783 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615436 4783 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615444 4783 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615451 4783 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615460 4783 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615468 4783 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615476 4783 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615483 4783 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615490 4783 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615499 4783 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615506 4783 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615514 4783 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615522 4783 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615529 4783 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615537 4783 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615545 4783 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615552 4783 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615560 4783 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615568 4783 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615575 4783 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615584 4783 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615591 4783 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615599 4783 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615606 4783 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615614 4783 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615622 4783 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615629 4783 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615637 4783 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615644 4783 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.615652 4783 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.615664 4783 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.615910 4783 server.go:940] "Client rotation is on, will bootstrap in background"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.624368 4783 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.624502 4783 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.626415 4783 server.go:997] "Starting client certificate rotation"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.626461 4783 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.626715 4783 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-10 17:30:33.408198223 +0000 UTC
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.626802 4783 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 987h55m32.781399278s for next certificate rotation
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.654192 4783 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.660553 4783 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.677967 4783 log.go:25] "Validated CRI v1 runtime API"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.718787 4783 log.go:25] "Validated CRI v1 image API"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.720689 4783 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.726629 4783 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-30-13-29-55-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.726677 4783 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.754042 4783 manager.go:217] Machine: {Timestamp:2025-09-30 13:35:00.75073413 +0000 UTC m=+0.682200517 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:fe87f595-c6b4-4675-9e9e-56e9408a3611 BootID:3fd15990-006c-4695-b2b8-b5f45241b454 Filesystems:[{Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:1b:4b:db Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:1b:4b:db Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:49:84:15 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:4c:32:73 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:9f:4a:0d Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:85:b1:2a Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:42:3a:1d Speed:-1 Mtu:1496} {Name:eth10 MacAddress:ae:d6:18:b3:e6:86 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:5a:61:0c:a9:c4:03 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.754827 4783 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.755034 4783 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.756327 4783 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.756674 4783 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.756732 4783 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.759699 4783 topology_manager.go:138] "Creating topology manager with none policy"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.760492 4783 container_manager_linux.go:303] "Creating device plugin manager"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.761199 4783 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.761304 4783 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
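The nodeConfig entry above carries this node's resource-reservation inputs: SystemReserved of 200m CPU, 350Mi memory, and 350Mi ephemeral-storage, plus a 100Mi memory.available hard eviction threshold, against the 33654124544-byte MemoryCapacity reported in the Machine entry. A back-of-the-envelope sketch of the standard node-allocatable computation (allocatable = capacity - kube-reserved - system-reserved - hard eviction; KubeReserved is null here, and base-2 Mi units are assumed):

package main

import "fmt"

func main() {
	const (
		capacity       int64 = 33654124544       // MemoryCapacity from the Machine: entry
		systemReserved int64 = 350 * 1024 * 1024 // SystemReserved "memory":"350Mi"
		evictionHard   int64 = 100 * 1024 * 1024 // HardEvictionThresholds memory.available "100Mi"
	)
	allocatable := capacity - systemReserved - evictionHard
	fmt.Printf("memory allocatable = %d bytes (%.2f GiB)\n",
		allocatable, float64(allocatable)/(1<<30))
}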
version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.761598 4783 state_mem.go:36] "Initialized new in-memory state store" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.761785 4783 server.go:1245] "Using root directory" path="/var/lib/kubelet" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.766893 4783 kubelet.go:418] "Attempting to sync node with API server" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.766952 4783 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.767035 4783 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.767057 4783 kubelet.go:324] "Adding apiserver pod source" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.767077 4783 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.772049 4783 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.772804 4783 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.773126 4783 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:00 crc kubenswrapper[4783]: E0930 13:35:00.773196 4783 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.88:6443: connect: connection refused" logger="UnhandledError" Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.773490 4783 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:00 crc kubenswrapper[4783]: E0930 13:35:00.773789 4783 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.88:6443: connect: connection refused" logger="UnhandledError" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.775387 4783 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.776755 4783 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.776783 4783 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.776793 4783 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.776802 4783 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/host-path" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.776818 4783 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.776828 4783 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.776838 4783 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.776860 4783 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.776872 4783 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.776884 4783 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.776898 4783 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.776908 4783 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.778291 4783 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.778814 4783 server.go:1280] "Started kubelet" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.779843 4783 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.779894 4783 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Sep 30 13:35:00 crc systemd[1]: Started Kubernetes Kubelet. Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.780583 4783 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.780979 4783 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.782488 4783 server.go:460] "Adding debug handlers to kubelet server" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.782683 4783 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.782728 4783 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.783455 4783 volume_manager.go:287] "The desired_state_of_world populator starts" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.783641 4783 volume_manager.go:289] "Starting Kubelet Volume Manager" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.783932 4783 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.783368 4783 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 08:52:11.484176946 +0000 UTC Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.784241 4783 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1099h17m10.699944528s for next certificate rotation Sep 30 13:35:00 crc 
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.784469 4783 factory.go:221] Registration of the systemd container factory successfully
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.785251 4783 factory.go:153] Registering CRI-O factory
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.785275 4783 factory.go:221] Registration of the crio container factory successfully
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.785649 4783 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.785799 4783 factory.go:103] Registering Raw factory
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.785823 4783 manager.go:1196] Started watching for new ooms in manager
Sep 30 13:35:00 crc kubenswrapper[4783]: E0930 13:35:00.785391 4783 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Sep 30 13:35:00 crc kubenswrapper[4783]: E0930 13:35:00.786587 4783 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.88:6443: connect: connection refused" interval="200ms"
Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.789947 4783 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused
Sep 30 13:35:00 crc kubenswrapper[4783]: E0930 13:35:00.790191 4783 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.88:6443: connect: connection refused" logger="UnhandledError"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.790404 4783 manager.go:319] Starting recovery of all containers
Sep 30 13:35:00 crc kubenswrapper[4783]: E0930 13:35:00.786063 4783 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.88:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a12d2bdd36f3e default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-30 13:35:00.778782526 +0000 UTC m=+0.710248843,LastTimestamp:2025-09-30 13:35:00.778782526 +0000 UTC m=+0.710248843,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805543 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805620 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805636 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805650 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805659 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805670 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805680 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805689 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805698 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805708 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805715 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805731 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
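The reconstruct.go:130 and :144 entries here (continuing below) are the volume manager rebuilding its actual state after the kubelet restart: each mount found on disk under /var/lib/kubelet/pods is re-added as "uncertain" until it can be reconciled against the desired state from the API server. A small sketch (not kubelet code; the tally.go file name is hypothetical) that tallies these entries per pod UID from a saved journal, e.g. journalctl -u kubelet | go run tally.go:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Matches the reconstruct.go:130 lines above; the :144 device lines carry no podName.
var entry = regexp.MustCompile(`reconstruct\.go:130\].*?podName="([^"]+)"`)

func main() {
	counts := make(map[string]int)
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // journal lines can be very long
	for sc.Scan() {
		for _, m := range entry.FindAllStringSubmatch(sc.Text(), -1) {
			counts[m[1]]++
		}
	}
	for pod, n := range counts {
		fmt.Printf("%s %d uncertain volume(s)\n", pod, n)
	}
}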
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805739 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805749 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805758 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805771 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805786 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805794 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805824 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.805834 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809399 4783 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809435 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809486 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809504 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809518 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809535 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809548 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809564 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809578 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809592 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809604 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809616 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809790 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809807 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809856 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809892 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809909 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809926 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809941 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809966 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809982 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.809997 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810015 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810030 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810045 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810060 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810077 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810096 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810116 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810132 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810147 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810170 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810183 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810202 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810217 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810250 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810283 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810297 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810335 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810348 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810367 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810384 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810402 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810428 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810448 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810466 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810483 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810510 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810529 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810548 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810562 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810575 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810596 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810612 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810630 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810647 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810662 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810679 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810692 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810705 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810727 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810745 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810759 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810773 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810791 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810804 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810817 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810836 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810850 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810866 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810878 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810890 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810903 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810917 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810956 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810975 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.810995 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811009 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811041 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811060 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811073 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811087 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811104 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811116 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811136 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811162 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811182 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811204 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811271 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811292 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811306 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811320 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811340 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811353 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811374 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811389 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811404 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811417 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811432 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811451 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811477 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811495 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811516 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811532 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811556 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811570 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811584 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811596 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811665 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811678 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811695 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811709 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811730 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811742 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811755 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811769 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811783 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811796 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811808 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811831 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811856 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811879 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811896 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811920 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292"
volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811942 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811957 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811970 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811984 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.811998 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812011 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812025 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812038 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812059 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812073 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812088 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812100 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812114 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812127 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812140 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812153 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812166 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812178 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812191 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812205 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812217 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812247 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812263 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812281 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812297 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812315 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812332 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812346 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812361 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812374 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812388 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812409 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812423 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812438 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812452 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812465 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812478 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812492 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812528 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812540 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812554 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812568 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812581 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812595 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812609 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812623 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812642 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812660 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812674 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812687 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812699 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812713 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812726 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812744 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812758 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" 
volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812774 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812787 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812800 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812813 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812826 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812839 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812852 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812867 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812880 4783 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812893 4783 reconstruct.go:97] "Volume reconstruction finished" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.812902 4783 reconciler.go:26] "Reconciler: start to sync state" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.820168 4783 manager.go:324] Recovery completed Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.831328 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.833545 4783 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.833587 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.833597 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.834787 4783 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.834815 4783 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.834856 4783 state_mem.go:36] "Initialized new in-memory state store" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.840145 4783 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.841753 4783 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.841798 4783 status_manager.go:217] "Starting to sync pod status with apiserver" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.841829 4783 kubelet.go:2335] "Starting kubelet main sync loop" Sep 30 13:35:00 crc kubenswrapper[4783]: E0930 13:35:00.841876 4783 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Sep 30 13:35:00 crc kubenswrapper[4783]: W0930 13:35:00.844482 4783 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:00 crc kubenswrapper[4783]: E0930 13:35:00.844549 4783 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.88:6443: connect: connection refused" logger="UnhandledError" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.853016 4783 policy_none.go:49] "None policy: Start" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.855406 4783 memory_manager.go:170] "Starting memorymanager" policy="None" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.855460 4783 state_mem.go:35] "Initializing new in-memory state store" Sep 30 13:35:00 crc kubenswrapper[4783]: E0930 13:35:00.886243 4783 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.907637 4783 manager.go:334] "Starting Device Plugin manager" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.907976 4783 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.908002 4783 server.go:79] "Starting device plugin registration server" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.908531 4783 eviction_manager.go:189] "Eviction manager: starting control loop" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.908556 4783 container_log_manager.go:189] "Initializing container 
log rotate workers" workers=1 monitorPeriod="10s" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.908760 4783 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.908870 4783 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.908878 4783 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Sep 30 13:35:00 crc kubenswrapper[4783]: E0930 13:35:00.921292 4783 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.942637 4783 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.942788 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.944064 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.944108 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.944120 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.944282 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.944933 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.944954 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.944962 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.945529 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.945551 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.945633 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.945662 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.945711 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.946128 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.946147 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.946154 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.946251 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.946584 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.946646 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.946780 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.946804 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.946815 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.947204 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.947231 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.947239 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.947323 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.947357 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.947378 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.947392 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.947512 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.947557 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.947807 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.947823 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.947830 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.947993 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.948005 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.948018 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.948031 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.948031 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.949048 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.949102 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.949106 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.949122 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.949174 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:00 crc kubenswrapper[4783]: I0930 13:35:00.949250 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:00 crc kubenswrapper[4783]: E0930 13:35:00.987698 4783 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.88:6443: connect: connection refused" interval="400ms" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.008982 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.010683 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.010759 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.010781 4783 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.010826 4783 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 13:35:01 crc kubenswrapper[4783]: E0930 13:35:01.011771 4783 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.88:6443: connect: connection refused" node="crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014253 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014293 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014325 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014354 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014376 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014401 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014423 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014474 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: 
\"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014521 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014636 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014676 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014705 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014760 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014847 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.014910 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.115553 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.115636 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: 
I0930 13:35:01.115669 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.115698 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.115729 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.115758 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.115788 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.115815 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.115857 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.115908 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.115861 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.115954 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.115992 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.115808 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116031 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116072 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116146 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116007 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116037 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116112 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116139 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116258 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116294 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116324 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116351 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116404 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116051 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116447 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116306 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.116567 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.212613 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.214607 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.214686 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:01 crc 
kubenswrapper[4783]: I0930 13:35:01.214705 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.214754 4783 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 13:35:01 crc kubenswrapper[4783]: E0930 13:35:01.215542 4783 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.88:6443: connect: connection refused" node="crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.285257 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.315687 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.323024 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.343539 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.349767 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:01 crc kubenswrapper[4783]: E0930 13:35:01.388512 4783 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.88:6443: connect: connection refused" interval="800ms" Sep 30 13:35:01 crc kubenswrapper[4783]: W0930 13:35:01.442218 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-a0981b501ccab59c25dfa3927dae61eae8d34bb994a211b453ba79d8d07152a8 WatchSource:0}: Error finding container a0981b501ccab59c25dfa3927dae61eae8d34bb994a211b453ba79d8d07152a8: Status 404 returned error can't find the container with id a0981b501ccab59c25dfa3927dae61eae8d34bb994a211b453ba79d8d07152a8 Sep 30 13:35:01 crc kubenswrapper[4783]: W0930 13:35:01.445082 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-237f3893222e0bf989626922d38647b6059eebfce07859d7779742c67f06ea79 WatchSource:0}: Error finding container 237f3893222e0bf989626922d38647b6059eebfce07859d7779742c67f06ea79: Status 404 returned error can't find the container with id 237f3893222e0bf989626922d38647b6059eebfce07859d7779742c67f06ea79 Sep 30 13:35:01 crc kubenswrapper[4783]: W0930 13:35:01.446884 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-3c9c681764afa1ec3b3f367618c076e589785362dffa368d72fc3dacd9656ac7 WatchSource:0}: Error finding container 3c9c681764afa1ec3b3f367618c076e589785362dffa368d72fc3dacd9656ac7: Status 404 returned error can't find the container with id 3c9c681764afa1ec3b3f367618c076e589785362dffa368d72fc3dacd9656ac7 Sep 30 13:35:01 crc kubenswrapper[4783]: W0930 13:35:01.450737 4783 manager.go:1169] Failed to 
process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-c37c1f74961e0669174e140a306f51e9c03a4171adc83a0e6d33f882238b00d4 WatchSource:0}: Error finding container c37c1f74961e0669174e140a306f51e9c03a4171adc83a0e6d33f882238b00d4: Status 404 returned error can't find the container with id c37c1f74961e0669174e140a306f51e9c03a4171adc83a0e6d33f882238b00d4 Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.615976 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.618096 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.618157 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.618174 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.618215 4783 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 13:35:01 crc kubenswrapper[4783]: E0930 13:35:01.618889 4783 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.88:6443: connect: connection refused" node="crc" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.782839 4783 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:01 crc kubenswrapper[4783]: W0930 13:35:01.804268 4783 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:01 crc kubenswrapper[4783]: E0930 13:35:01.804354 4783 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.88:6443: connect: connection refused" logger="UnhandledError" Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.847178 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3c9c681764afa1ec3b3f367618c076e589785362dffa368d72fc3dacd9656ac7"} Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.848751 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"c37c1f74961e0669174e140a306f51e9c03a4171adc83a0e6d33f882238b00d4"} Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.849586 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"237f3893222e0bf989626922d38647b6059eebfce07859d7779742c67f06ea79"} Sep 30 13:35:01 
crc kubenswrapper[4783]: I0930 13:35:01.850831 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a0981b501ccab59c25dfa3927dae61eae8d34bb994a211b453ba79d8d07152a8"} Sep 30 13:35:01 crc kubenswrapper[4783]: I0930 13:35:01.851946 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9fef52de1d26f38e7dc2150c96a9d3a10ca109607294daf2bca0b41add175524"} Sep 30 13:35:01 crc kubenswrapper[4783]: W0930 13:35:01.857841 4783 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:01 crc kubenswrapper[4783]: E0930 13:35:01.857942 4783 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.88:6443: connect: connection refused" logger="UnhandledError" Sep 30 13:35:02 crc kubenswrapper[4783]: W0930 13:35:02.060622 4783 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:02 crc kubenswrapper[4783]: E0930 13:35:02.060711 4783 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.88:6443: connect: connection refused" logger="UnhandledError" Sep 30 13:35:02 crc kubenswrapper[4783]: E0930 13:35:02.189928 4783 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.88:6443: connect: connection refused" interval="1.6s" Sep 30 13:35:02 crc kubenswrapper[4783]: W0930 13:35:02.326203 4783 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:02 crc kubenswrapper[4783]: E0930 13:35:02.326389 4783 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.88:6443: connect: connection refused" logger="UnhandledError" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.419573 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.420860 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.420903 4783 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.420931 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.420960 4783 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 13:35:02 crc kubenswrapper[4783]: E0930 13:35:02.422452 4783 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.88:6443: connect: connection refused" node="crc" Sep 30 13:35:02 crc kubenswrapper[4783]: E0930 13:35:02.765028 4783 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.88:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a12d2bdd36f3e default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-30 13:35:00.778782526 +0000 UTC m=+0.710248843,LastTimestamp:2025-09-30 13:35:00.778782526 +0000 UTC m=+0.710248843,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.782471 4783 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.857331 4783 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd" exitCode=0 Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.857467 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.857480 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd"} Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.858786 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.858821 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.858835 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.859847 4783 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="c6e3d2fc1454181f555d17a95674b08acac585a30c8f82f7099dd9787fca7b22" exitCode=0 Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.859944 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"c6e3d2fc1454181f555d17a95674b08acac585a30c8f82f7099dd9787fca7b22"} Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.860049 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.861886 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.861934 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.861947 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.865095 4783 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="37436153a3c4bb50bf763a76ddc19682010ab9cfe3ad6effb58229f0a7bebc87" exitCode=0 Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.865283 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.865322 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"37436153a3c4bb50bf763a76ddc19682010ab9cfe3ad6effb58229f0a7bebc87"} Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.866165 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.866240 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.866254 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.868735 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.869252 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b"} Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.869325 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c"} Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.869850 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.869889 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.869899 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.872912 4783 generic.go:334] "Generic (PLEG): container 
finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7" exitCode=0 Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.872979 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7"} Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.873144 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.876105 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.876143 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:02 crc kubenswrapper[4783]: I0930 13:35:02.876169 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.782179 4783 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:03 crc kubenswrapper[4783]: E0930 13:35:03.791430 4783 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.88:6443: connect: connection refused" interval="3.2s" Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.881344 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf"} Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.881403 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986"} Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.884080 4783 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="0bdc910d1bfde3bac6c219a5b459ad63923341ddf405bcf744dc15991c0a9592" exitCode=0 Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.884171 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"0bdc910d1bfde3bac6c219a5b459ad63923341ddf405bcf744dc15991c0a9592"} Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.884345 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.886116 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.886151 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.886163 4783 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.886148 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"d0babf244c6e52dd22c0ff4cac80c59e3648465b07b01fb3efa928bd51e420f9"} Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.886290 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.887590 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.887662 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.887685 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.890591 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319"} Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.890646 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b"} Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.890704 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.892151 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.892216 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.892276 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.894879 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"026217b82bd8ae03e644f40984148495e3b4ce9d342008e6fe3070d2d0d7db10"} Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.894941 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e60989335a4f5df01665b18ae6ede533d576de70e6ddcfa28e59f83400055051"} Sep 30 13:35:03 crc kubenswrapper[4783]: I0930 13:35:03.894971 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"25232655bbb35fd9585d250a134dec02d17754eebc513502e0344820bf93210b"} Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.022824 4783 kubelet_node_status.go:401] "Setting node 
annotation to enable volume controller attach/detach" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.024137 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.024177 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.024185 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.024248 4783 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 13:35:04 crc kubenswrapper[4783]: E0930 13:35:04.024864 4783 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.88:6443: connect: connection refused" node="crc" Sep 30 13:35:04 crc kubenswrapper[4783]: W0930 13:35:04.191725 4783 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:04 crc kubenswrapper[4783]: E0930 13:35:04.191889 4783 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.88:6443: connect: connection refused" logger="UnhandledError" Sep 30 13:35:04 crc kubenswrapper[4783]: W0930 13:35:04.309276 4783 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:04 crc kubenswrapper[4783]: E0930 13:35:04.309368 4783 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.88:6443: connect: connection refused" logger="UnhandledError" Sep 30 13:35:04 crc kubenswrapper[4783]: W0930 13:35:04.644346 4783 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:04 crc kubenswrapper[4783]: E0930 13:35:04.644521 4783 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.88:6443: connect: connection refused" logger="UnhandledError" Sep 30 13:35:04 crc kubenswrapper[4783]: W0930 13:35:04.658844 4783 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:04 crc kubenswrapper[4783]: E0930 13:35:04.658962 4783 
reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.88:6443: connect: connection refused" logger="UnhandledError" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.781800 4783 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.904154 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c"} Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.904205 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.904251 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"eb34085178bfe2a3e4da509a87e43b04ada7fa50e1f8cc92689523c7a8f9fd63"} Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.904282 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa"} Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.904977 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.905008 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.905018 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.906026 4783 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="386daeadebb9ab2a3aaf1942a741c2677a968935c4a90975fc9a4116db84e904" exitCode=0 Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.906073 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"386daeadebb9ab2a3aaf1942a741c2677a968935c4a90975fc9a4116db84e904"} Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.906095 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.906173 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.906217 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.906216 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.906903 4783 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.906971 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.906993 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.907171 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.907250 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.907266 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.907781 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.907832 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.907855 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.907853 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.907891 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:04 crc kubenswrapper[4783]: I0930 13:35:04.907900 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.017125 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.017242 4783 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": dial tcp 192.168.126.11:6443: connect: connection refused" start-of-body= Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.017285 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": dial tcp 192.168.126.11:6443: connect: connection refused" Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.781911 4783 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.88:6443: connect: connection refused Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.912718 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3ea5faf3db75962115e4b66f35da03773952d4711e41724c401a429afaff75a8"} Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 
13:35:05.912770 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"37873ee718e4741e7c5045ccb50b0842e137fe39b348ffdb3907fc3c673faa8b"} Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.912784 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f1d4d7a1201dabb817d95c337cbc36703b8a82ddbfc2c2076979151f95a4d4ce"} Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.914946 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.917007 4783 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="eb34085178bfe2a3e4da509a87e43b04ada7fa50e1f8cc92689523c7a8f9fd63" exitCode=255 Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.917039 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"eb34085178bfe2a3e4da509a87e43b04ada7fa50e1f8cc92689523c7a8f9fd63"} Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.917153 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.918120 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.918150 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.918158 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.918645 4783 scope.go:117] "RemoveContainer" containerID="eb34085178bfe2a3e4da509a87e43b04ada7fa50e1f8cc92689523c7a8f9fd63" Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.931410 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.932336 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.934656 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.934687 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:05 crc kubenswrapper[4783]: I0930 13:35:05.934702 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.729001 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.729201 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.730534 4783 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.730599 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.730614 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.924343 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.927404 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e"} Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.928077 4783 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.928183 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.930022 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.930084 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.930107 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.933984 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8fa66f459ba0d5b69e943f690fbc3dce07e5109f83a3dfb54ba4417ae0c60845"} Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.934040 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"20ab4a13ac287025ab72d723f0d2dc5a44361657dce5ddf87e86d98201cf0b5a"} Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.934160 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.935594 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.935660 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:06 crc kubenswrapper[4783]: I0930 13:35:06.935685 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.226000 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.228440 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.228513 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.228537 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.228760 4783 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.937555 4783 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.937630 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.937585 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.939093 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.939142 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.939161 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.939271 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.939311 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.939331 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:07 crc kubenswrapper[4783]: I0930 13:35:07.958660 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:08 crc kubenswrapper[4783]: I0930 13:35:08.940534 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:08 crc kubenswrapper[4783]: I0930 13:35:08.941951 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:08 crc kubenswrapper[4783]: I0930 13:35:08.942025 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:08 crc kubenswrapper[4783]: I0930 13:35:08.942044 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.034600 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.443770 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.443986 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.445637 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.445684 4783 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.445704 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.729539 4783 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.729636 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.865350 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.944491 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.944587 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.946196 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.946396 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.946421 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.946295 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.946531 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.946555 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.970142 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.970473 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.972004 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.972059 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:09 crc kubenswrapper[4783]: I0930 13:35:09.972076 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:10 crc kubenswrapper[4783]: I0930 13:35:10.068313 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:10 crc kubenswrapper[4783]: I0930 13:35:10.096774 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:10 crc kubenswrapper[4783]: I0930 13:35:10.297601 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Sep 30 13:35:10 crc kubenswrapper[4783]: E0930 13:35:10.921570 4783 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 30 13:35:10 crc kubenswrapper[4783]: I0930 13:35:10.946892 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:10 crc kubenswrapper[4783]: I0930 13:35:10.946995 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:10 crc kubenswrapper[4783]: I0930 13:35:10.948343 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:10 crc kubenswrapper[4783]: I0930 13:35:10.948397 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:10 crc kubenswrapper[4783]: I0930 13:35:10.948417 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:10 crc kubenswrapper[4783]: I0930 13:35:10.948511 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:10 crc kubenswrapper[4783]: I0930 13:35:10.948539 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:10 crc kubenswrapper[4783]: I0930 13:35:10.948561 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:11 crc kubenswrapper[4783]: I0930 13:35:11.948665 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:11 crc kubenswrapper[4783]: I0930 13:35:11.949971 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:11 crc kubenswrapper[4783]: I0930 13:35:11.950016 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:11 crc kubenswrapper[4783]: I0930 13:35:11.950028 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:16 crc kubenswrapper[4783]: I0930 13:35:16.300926 4783 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 30 13:35:16 crc kubenswrapper[4783]: I0930 13:35:16.301017 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 30 13:35:16 crc kubenswrapper[4783]: I0930 13:35:16.310662 4783 
patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Sep 30 13:35:16 crc kubenswrapper[4783]: I0930 13:35:16.310733 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Sep 30 13:35:17 crc kubenswrapper[4783]: I0930 13:35:17.959981 4783 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Sep 30 13:35:17 crc kubenswrapper[4783]: I0930 13:35:17.960052 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Sep 30 13:35:18 crc kubenswrapper[4783]: I0930 13:35:18.548882 4783 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Sep 30 13:35:18 crc kubenswrapper[4783]: I0930 13:35:18.548979 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Sep 30 13:35:19 crc kubenswrapper[4783]: I0930 13:35:19.730369 4783 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Sep 30 13:35:19 crc kubenswrapper[4783]: I0930 13:35:19.730468 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Sep 30 13:35:19 crc kubenswrapper[4783]: I0930 13:35:19.872815 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 13:35:19 crc kubenswrapper[4783]: I0930 13:35:19.873078 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 13:35:19 crc kubenswrapper[4783]: I0930 13:35:19.874916 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:19 crc kubenswrapper[4783]: I0930 13:35:19.874974 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:19 crc kubenswrapper[4783]: I0930 13:35:19.874986 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.005281 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.005565 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.007016 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.007070 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.007089 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.026406 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.027127 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.027460 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.028163 4783 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.028302 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.029512 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.029567 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.029592 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.035850 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 13:35:20 crc kubenswrapper[4783]: E0930 13:35:20.921717 4783 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.977821 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.978390 4783 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.978486 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.979340 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.979387 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.979404 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.979462 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.979508 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:20 crc kubenswrapper[4783]: I0930 13:35:20.979530 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.302954 4783 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.305689 4783 trace.go:236] Trace[1813513062]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 13:35:10.450) (total time: 10854ms):
Sep 30 13:35:21 crc kubenswrapper[4783]: Trace[1813513062]: ---"Objects listed" error: 10854ms (13:35:21.305)
Sep 30 13:35:21 crc kubenswrapper[4783]: Trace[1813513062]: [10.854958858s] [10.854958858s] END
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.305741 4783 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.306657 4783 trace.go:236] Trace[336262834]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 13:35:08.486) (total time: 12819ms):
Sep 30 13:35:21 crc kubenswrapper[4783]: Trace[336262834]: ---"Objects listed" error: 12819ms (13:35:21.306)
Sep 30 13:35:21 crc kubenswrapper[4783]: Trace[336262834]: [12.819600081s] [12.819600081s] END
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.306704 4783 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.306902 4783 trace.go:236] Trace[1123659196]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 13:35:09.355) (total time: 11951ms):
Sep 30 13:35:21 crc kubenswrapper[4783]: Trace[1123659196]: ---"Objects listed" error: 11951ms (13:35:21.306)
Sep 30 13:35:21 crc kubenswrapper[4783]: Trace[1123659196]: [11.951422828s] [11.951422828s] END
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.306943 4783 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.307025 4783 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.308924 4783 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.310549 4783 trace.go:236] Trace[257589849]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 13:35:09.031) (total time: 12278ms):
Sep 30 13:35:21 crc kubenswrapper[4783]: Trace[257589849]: ---"Objects listed" error: 12278ms (13:35:21.310)
Sep 30 13:35:21 crc kubenswrapper[4783]: Trace[257589849]: [12.278829863s] [12.278829863s] END
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.310581 4783 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.782301 4783 apiserver.go:52] "Watching apiserver"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.870036 4783 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.870882 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"]
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.871337 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.871391 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.871543 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.873040 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.873302 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.873782 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.875910 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.876150 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.876834 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.877262 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.878159 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.880134 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.887134 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.887175 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.887338 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.887399 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.887752 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.887866 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.888089 4783 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.910278 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.910777 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.910877 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.910945 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.911028 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:35:22.410995959 +0000 UTC m=+22.342462286 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911092 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911130 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911159 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911182 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911205 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911253 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911305 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911615 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911650 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911663 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911743 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911795 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911829 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911869 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911872 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911908 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911914 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911941 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.911976 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912011 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912042 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912073 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912104 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912137 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912166 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912174 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912199 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912211 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912253 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912270 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912304 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912336 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912366 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912398 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912428 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: 
\"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912457 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912464 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912492 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912524 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912557 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912596 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912595 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912628 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912639 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912622 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912661 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912772 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912833 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912886 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912942 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912990 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913039 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913096 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913160 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912620 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913285 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913217 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913354 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913394 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913430 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913467 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913505 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913541 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" 
(UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913576 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913609 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913644 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913680 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913714 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913777 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913823 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913867 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913902 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913935 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913967 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913999 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.914033 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.914067 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.914099 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.914163 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.914191 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.914212 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.914939 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.914963 4783 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.914982 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915005 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915025 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915046 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915067 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915086 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915108 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915127 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915147 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915169 
4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915188 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915208 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915244 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915264 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915285 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915304 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915327 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915348 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915373 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915395 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915414 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915435 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915455 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915475 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915494 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915514 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915534 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915555 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915579 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915600 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915620 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915640 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915661 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915680 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915703 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915723 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915742 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915762 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915782 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915802 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915821 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915842 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915863 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915907 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915929 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915950 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915971 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915991 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916011 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916034 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916054 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916075 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916097 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916117 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916137 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916159 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916180 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916202 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916240 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916261 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916285 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916307 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916330 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916352 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916374 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916394 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916417 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916439 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916460 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916481 4783 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916504 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916524 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916547 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916573 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916599 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916624 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916650 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916708 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916731 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916752 4783 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916776 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916796 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916817 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916846 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916876 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916904 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916933 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916959 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916988 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 
13:35:21.917017 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.917048 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.917080 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.917115 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.917140 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.917171 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913295 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912768 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.912942 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913036 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913206 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913217 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913579 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913595 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.913904 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.914200 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.914451 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.914592 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915417 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915704 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915743 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.915969 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916109 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916321 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.916825 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.917187 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.917899 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.918169 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.918426 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.918495 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.918683 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.918782 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.919048 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.919920 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.920191 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.920647 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.922376 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.922855 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.925247 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.925658 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.926192 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.926431 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.926702 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.926779 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.926931 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.927854 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.927873 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.927976 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.928211 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.928244 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.928500 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.928535 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.928735 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.928780 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.929099 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.929365 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.929482 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.931187 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.931292 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.931555 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.933410 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.933785 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.933911 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.935260 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.935574 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.935771 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.936013 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.936025 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.936028 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.936275 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.936558 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.936932 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.939499 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.939507 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.939665 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.939760 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.939981 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.940136 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.940248 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.940335 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.940358 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.940705 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.940766 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.941339 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.941419 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.941640 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.941654 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.941951 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.942139 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.942292 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.942517 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.942661 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.942782 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.942907 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.942970 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.943065 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.943156 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.943552 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.944201 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.944514 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.944754 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.944851 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.945022 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.945168 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.945402 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.945520 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.945612 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.945616 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.946340 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.946589 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.946859 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.946996 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.947678 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.947944 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.948183 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.948416 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.948532 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.948672 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.948877 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.948893 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.949030 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.949073 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.949586 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.950037 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.950921 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.951136 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.951213 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.951239 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.951281 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.951659 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.951683 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.951465 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.917202 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.952151 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.952384 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.952453 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.952516 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.952589 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.952652 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 13:35:21 crc 
kubenswrapper[4783]: I0930 13:35:21.952741 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.952831 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.952906 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.952982 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.953062 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.953130 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.953198 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.953278 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.953357 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.953425 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: 
\"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.953495 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.953562 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.953625 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.953733 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.953805 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.953872 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.953937 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.953999 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.954069 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.954147 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.958907 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.959992 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960038 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.952068 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960103 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.952384 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.954050 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.954244 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.954484 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.954862 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.958350 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.958571 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.958825 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.959199 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.959481 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.959473 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.959719 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.959848 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.959990 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960126 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960344 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960442 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960482 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960486 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960511 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960540 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960542 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960573 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960600 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960623 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960646 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960674 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960773 4783 reconciler_common.go:293] "Volume detached 
for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960790 4783 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960804 4783 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960817 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960912 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960926 4783 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960939 4783 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960950 4783 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960962 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960974 4783 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961184 4783 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961205 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961239 4783 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961255 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 
13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961268 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961280 4783 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961292 4783 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961304 4783 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961316 4783 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961328 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961339 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961351 4783 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961371 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961383 4783 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961395 4783 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961410 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961423 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961435 4783 
reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960787 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960816 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960884 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960909 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.960930 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961554 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961587 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961614 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961772 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.961829 4783 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.961857 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.962210 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:22.462195157 +0000 UTC m=+22.393661464 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.962018 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.962024 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.962301 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.962835 4783 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.963312 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.963372 4783 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.963437 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:22.463418816 +0000 UTC m=+22.394885123 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.963629 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.963702 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.963772 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965132 4783 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965163 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965180 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965193 4783 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965204 4783 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965235 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965250 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965263 4783 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965275 4783 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965288 4783 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965301 4783 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965313 4783 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965325 4783 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" 
DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965338 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965351 4783 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965363 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965375 4783 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965387 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965399 4783 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965413 4783 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965426 4783 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965438 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965450 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965462 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965474 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965486 4783 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc 
kubenswrapper[4783]: I0930 13:35:21.965498 4783 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965509 4783 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965520 4783 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965533 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965555 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965564 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965573 4783 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965581 4783 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965589 4783 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965598 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965606 4783 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965634 4783 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965643 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc 
kubenswrapper[4783]: I0930 13:35:21.965651 4783 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965659 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965667 4783 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965675 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965684 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965693 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965701 4783 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965710 4783 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965719 4783 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965729 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965737 4783 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965745 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965754 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 
13:35:21.965763 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965771 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965779 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965787 4783 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965795 4783 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965803 4783 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965811 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965820 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965830 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965839 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965848 4783 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965856 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965864 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 
13:35:21.965872 4783 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965881 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965889 4783 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965896 4783 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965905 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965914 4783 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965922 4783 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965930 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965939 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965947 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965955 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965965 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965973 4783 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath 
\"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965981 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965991 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.965999 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966008 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966017 4783 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966025 4783 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966033 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966041 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966050 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966059 4783 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966068 4783 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966076 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966084 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" 
(UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966093 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966102 4783 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966123 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966132 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966140 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966148 4783 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966156 4783 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966164 4783 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966172 4783 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966179 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966188 4783 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966196 4783 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966204 4783 reconciler_common.go:293] "Volume detached for 
volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966212 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966239 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966252 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966261 4783 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966271 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966279 4783 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966287 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966295 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966304 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966312 4783 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966319 4783 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966329 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966337 4783 
reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966345 4783 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966354 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966363 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966371 4783 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966379 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966387 4783 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966395 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966403 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966412 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966419 4783 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966429 4783 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966437 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.966445 4783 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.976211 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.976262 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.976276 4783 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.976346 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:22.47632757 +0000 UTC m=+22.407793957 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.981244 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.981444 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.981545 4783 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:21 crc kubenswrapper[4783]: E0930 13:35:21.981706 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:22.481683081 +0000 UTC m=+22.413149458 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.982016 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.982108 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.982123 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.983707 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.984114 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.986571 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.987314 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.988183 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.988191 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e"} Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.988294 4783 scope.go:117] "RemoveContainer" containerID="eb34085178bfe2a3e4da509a87e43b04ada7fa50e1f8cc92689523c7a8f9fd63" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.988452 4783 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e" exitCode=255 Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.988585 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.988688 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.988995 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.989154 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.989762 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.990149 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:21 crc kubenswrapper[4783]: I0930 13:35:21.991058 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.000960 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.056698 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.057194 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.057554 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.058073 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.058515 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.059646 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.060859 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.064856 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.067777 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.068044 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.068127 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.068298 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.068336 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.068593 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.068765 4783 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.068857 4783 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.068883 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.068907 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.068953 4783 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069023 4783 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069100 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069123 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069132 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069192 4783 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069274 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069388 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069406 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc 
kubenswrapper[4783]: I0930 13:35:22.069417 4783 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069430 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069439 4783 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069448 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069456 4783 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069467 4783 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069476 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069486 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069496 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069506 4783 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069517 4783 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069526 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069537 4783 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069545 
4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.069554 4783 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.070765 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.083481 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.094608 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.095678 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.097378 4783 scope.go:117] "RemoveContainer" containerID="56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e"
Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.097753 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792"
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.099548 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.110935 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.111735 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.114100 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.130361 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.144155 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.148170 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.155425 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.157162 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.170744 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.170784 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.170805 4783 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.170826 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.170845 4783 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.174027 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.207270 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 13:35:22 crc kubenswrapper[4783]: W0930 13:35:22.221356 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-10cb9d86b659a453697f20728e546d07c923e25656a7af60078b6b5e34ef3566 WatchSource:0}: Error finding container 10cb9d86b659a453697f20728e546d07c923e25656a7af60078b6b5e34ef3566: Status 404 returned error can't find the container with id 10cb9d86b659a453697f20728e546d07c923e25656a7af60078b6b5e34ef3566 Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.235970 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.255073 4783 util.go:30] "No sandbox for pod can be found. 
Sep 30 13:35:22 crc kubenswrapper[4783]: W0930 13:35:22.268815 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-961e34f84b349be4620836af362d853136d03cbbb7135ea7aef0a41bcd489619 WatchSource:0}: Error finding container 961e34f84b349be4620836af362d853136d03cbbb7135ea7aef0a41bcd489619: Status 404 returned error can't find the container with id 961e34f84b349be4620836af362d853136d03cbbb7135ea7aef0a41bcd489619
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.271568 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.472560 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.472629 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.472669 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.472960 4783 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.472971 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:35:23.472951251 +0000 UTC m=+23.404417558 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.473047 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:23.473027983 +0000 UTC m=+23.404494290 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.473063 4783 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.473107 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:23.473099186 +0000 UTC m=+23.404565493 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.573337 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.573436 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.573590 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.573618 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.573640 4783 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.573709 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:23.573680634 +0000 UTC m=+23.505146981 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.573997 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.574035 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.574049 4783 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.574112 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:23.574093547 +0000 UTC m=+23.505559864 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.849194 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes"
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.850473 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes"
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.853119 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes"
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.853809 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes"
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.854405 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes"
Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.854926 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes"
path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.855554 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.856474 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.857173 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.857825 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.858491 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.859309 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.859963 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.863006 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.864183 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.865345 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.866902 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.867917 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.869163 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.870559 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.872442 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.873491 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.874177 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.875075 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.875668 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.876534 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.877408 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.878035 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.878780 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.879531 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.880183 4783 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.880346 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.882093 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.884062 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" 
path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.884860 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.886788 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.887765 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.888453 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.889411 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.890473 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.891120 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.891960 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.892844 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.894600 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.895777 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.896969 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.898368 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.899964 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" 
path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.901055 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.902100 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.903150 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.905612 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.907021 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.908085 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.994023 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.997343 4783 scope.go:117] "RemoveContainer" containerID="56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e" Sep 30 13:35:22 crc kubenswrapper[4783]: E0930 13:35:22.997594 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 30 13:35:22 crc kubenswrapper[4783]: I0930 13:35:22.998117 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"af1538611897ad1b2ef7785e4f2dc10986f840ddb72a24c48c973c7ea3746deb"} Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.001546 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a"} Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.001582 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727"} Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.001598 4783 kubelet.go:2453] "SyncLoop 
Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.003709 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219"}
Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.003780 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"10cb9d86b659a453697f20728e546d07c923e25656a7af60078b6b5e34ef3566"}
Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.013568 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:23Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.029825 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:23Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.042525 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:23Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.058115 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:23Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.077710 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:23Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.097373 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:23Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.112469 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:23Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.125917 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:23Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.140130 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:23Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.156100 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:23Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.208089 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:23Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.223612 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:23Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.239117 4783 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:23Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.263683 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:23Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.481539 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.481666 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.481737 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" 
(UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.481833 4783 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.481907 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:25.481883757 +0000 UTC m=+25.413350094 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.482504 4783 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.482637 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:35:25.482582419 +0000 UTC m=+25.414048766 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.483049 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:25.483013823 +0000 UTC m=+25.414480170 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.583064 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.583174 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.583328 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.583390 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.583417 4783 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.583335 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.583511 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:25.583480318 +0000 UTC m=+25.514946665 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.583522 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.583545 4783 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.583598 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:25.583580901 +0000 UTC m=+25.515047248 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.842555 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.842655 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.842709 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.842807 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:35:23 crc kubenswrapper[4783]: I0930 13:35:23.842574 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:35:23 crc kubenswrapper[4783]: E0930 13:35:23.842897 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.280788 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-gcx27"]
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.281157 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-gcx27"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.282820 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.283110 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.283557 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.283724 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.302077 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.315519 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.324751 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.336174 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.350736 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.361204 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.373332 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.373496 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-kszvl"]
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.374033 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-kszvl"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.375325 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.375524 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.378496 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.389306 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.398419 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/53acc368-19fd-4980-a438-1122e2b7c12e-host\") pod \"node-ca-gcx27\" (UID: \"53acc368-19fd-4980-a438-1122e2b7c12e\") " pod="openshift-image-registry/node-ca-gcx27"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.398449 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfjth\" (UniqueName: \"kubernetes.io/projected/53acc368-19fd-4980-a438-1122e2b7c12e-kube-api-access-sfjth\") pod \"node-ca-gcx27\" (UID: \"53acc368-19fd-4980-a438-1122e2b7c12e\") " pod="openshift-image-registry/node-ca-gcx27"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.398465 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/53acc368-19fd-4980-a438-1122e2b7c12e-serviceca\") pod \"node-ca-gcx27\" (UID: \"53acc368-19fd-4980-a438-1122e2b7c12e\") " pod="openshift-image-registry/node-ca-gcx27"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.400742 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.413160 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.423969 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.432188 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.444320 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.456149 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.466333 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.481606 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.495035 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.499256 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.499325 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e-hosts-file\") pod \"node-resolver-kszvl\" (UID: \"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\") " pod="openshift-dns/node-resolver-kszvl"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.499359 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.499374 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/53acc368-19fd-4980-a438-1122e2b7c12e-host\") pod \"node-ca-gcx27\" (UID: \"53acc368-19fd-4980-a438-1122e2b7c12e\") " pod="openshift-image-registry/node-ca-gcx27"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.499390 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfjth\" (UniqueName: \"kubernetes.io/projected/53acc368-19fd-4980-a438-1122e2b7c12e-kube-api-access-sfjth\") pod \"node-ca-gcx27\" (UID: \"53acc368-19fd-4980-a438-1122e2b7c12e\") " pod="openshift-image-registry/node-ca-gcx27"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.499405 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/53acc368-19fd-4980-a438-1122e2b7c12e-serviceca\") pod \"node-ca-gcx27\" (UID: \"53acc368-19fd-4980-a438-1122e2b7c12e\") " pod="openshift-image-registry/node-ca-gcx27"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.499425 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.499448 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfwzt\" (UniqueName: \"kubernetes.io/projected/27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e-kube-api-access-gfwzt\") pod \"node-resolver-kszvl\" (UID: \"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\") " pod="openshift-dns/node-resolver-kszvl"
Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.499517 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:35:29.49950284 +0000 UTC m=+29.430969147 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.499575 4783 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.499598 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:29.499592523 +0000 UTC m=+29.431058830 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.499621 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/53acc368-19fd-4980-a438-1122e2b7c12e-host\") pod \"node-ca-gcx27\" (UID: \"53acc368-19fd-4980-a438-1122e2b7c12e\") " pod="openshift-image-registry/node-ca-gcx27"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.500760 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/53acc368-19fd-4980-a438-1122e2b7c12e-serviceca\") pod \"node-ca-gcx27\" (UID: \"53acc368-19fd-4980-a438-1122e2b7c12e\") " pod="openshift-image-registry/node-ca-gcx27"
Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.500838 4783 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.500879 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:29.500869574 +0000 UTC m=+29.432335881 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.516978 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfjth\" (UniqueName: \"kubernetes.io/projected/53acc368-19fd-4980-a438-1122e2b7c12e-kube-api-access-sfjth\") pod \"node-ca-gcx27\" (UID: \"53acc368-19fd-4980-a438-1122e2b7c12e\") " pod="openshift-image-registry/node-ca-gcx27"
Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.596431 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-gcx27" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.600391 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e-hosts-file\") pod \"node-resolver-kszvl\" (UID: \"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\") " pod="openshift-dns/node-resolver-kszvl" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.600480 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e-hosts-file\") pod \"node-resolver-kszvl\" (UID: \"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\") " pod="openshift-dns/node-resolver-kszvl" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.600507 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.600574 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfwzt\" (UniqueName: \"kubernetes.io/projected/27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e-kube-api-access-gfwzt\") pod \"node-resolver-kszvl\" (UID: \"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\") " pod="openshift-dns/node-resolver-kszvl" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.600607 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.600689 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.600935 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.600972 4783 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.600758 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.601036 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.601048 4783 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod 
openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.601096 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:29.600997998 +0000 UTC m=+29.532464305 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.601113 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:29.601106042 +0000 UTC m=+29.532572349 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:25 crc kubenswrapper[4783]: W0930 13:35:25.609625 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod53acc368_19fd_4980_a438_1122e2b7c12e.slice/crio-2bc1dfd8091c9284487af41d76be3f6b6fe8afe1ba7dae85a418fa23dabe1e76 WatchSource:0}: Error finding container 2bc1dfd8091c9284487af41d76be3f6b6fe8afe1ba7dae85a418fa23dabe1e76: Status 404 returned error can't find the container with id 2bc1dfd8091c9284487af41d76be3f6b6fe8afe1ba7dae85a418fa23dabe1e76 Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.619753 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfwzt\" (UniqueName: \"kubernetes.io/projected/27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e-kube-api-access-gfwzt\") pod \"node-resolver-kszvl\" (UID: \"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\") " pod="openshift-dns/node-resolver-kszvl" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.688308 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-kszvl" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.748166 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-mxltm"] Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.748775 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.749084 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-2pmr9"] Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.749407 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.750593 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-668zf"] Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.750933 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.752387 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.752422 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.752539 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.752563 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.752778 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.752825 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.752881 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.752903 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.753033 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.753135 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.753246 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.753252 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.764303 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.786249 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.805440 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.818915 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.831546 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.842919 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.843030 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.843322 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.843370 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.843416 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:25 crc kubenswrapper[4783]: E0930 13:35:25.843473 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.843848 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.860239 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.883773 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.898992 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.904971 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-var-lib-cni-multus\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905011 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b379cdd3-61d0-47bd-8d9c-4f7809bb75cb-proxy-tls\") pod \"machine-config-daemon-668zf\" (UID: \"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\") " pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905031 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tfvk\" 
(UniqueName: \"kubernetes.io/projected/17cdc54d-47d0-41b9-be99-f8293fa63ec6-kube-api-access-6tfvk\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905102 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/17cdc54d-47d0-41b9-be99-f8293fa63ec6-cnibin\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905122 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/17cdc54d-47d0-41b9-be99-f8293fa63ec6-os-release\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905241 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-etc-kubernetes\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905330 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b379cdd3-61d0-47bd-8d9c-4f7809bb75cb-rootfs\") pod \"machine-config-daemon-668zf\" (UID: \"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\") " pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905402 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-multus-socket-dir-parent\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905436 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-system-cni-dir\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905452 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-run-netns\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905468 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/17cdc54d-47d0-41b9-be99-f8293fa63ec6-cni-binary-copy\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905499 4783 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e4186982-08f1-4809-be4f-25f86353ccf1-cni-binary-copy\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905514 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-var-lib-cni-bin\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905532 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-cnibin\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905546 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/17cdc54d-47d0-41b9-be99-f8293fa63ec6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905579 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-var-lib-kubelet\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905595 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b379cdd3-61d0-47bd-8d9c-4f7809bb75cb-mcd-auth-proxy-config\") pod \"machine-config-daemon-668zf\" (UID: \"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\") " pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905610 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-run-multus-certs\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905626 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjh8r\" (UniqueName: \"kubernetes.io/projected/b379cdd3-61d0-47bd-8d9c-4f7809bb75cb-kube-api-access-cjh8r\") pod \"machine-config-daemon-668zf\" (UID: \"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\") " pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905644 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/17cdc54d-47d0-41b9-be99-f8293fa63ec6-system-cni-dir\") pod \"multus-additional-cni-plugins-mxltm\" (UID: 
\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905664 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-hostroot\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905678 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-run-k8s-cni-cncf-io\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905711 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-os-release\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905744 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqvlm\" (UniqueName: \"kubernetes.io/projected/e4186982-08f1-4809-be4f-25f86353ccf1-kube-api-access-xqvlm\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905785 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e4186982-08f1-4809-be4f-25f86353ccf1-multus-daemon-config\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905812 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/17cdc54d-47d0-41b9-be99-f8293fa63ec6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905835 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-multus-cni-dir\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.905865 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-multus-conf-dir\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.919444 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.936884 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.950036 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:25 crc kubenswrapper[4783]: I0930 13:35:25.995687 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:25Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007281 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e4186982-08f1-4809-be4f-25f86353ccf1-multus-daemon-config\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007324 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/17cdc54d-47d0-41b9-be99-f8293fa63ec6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007351 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-multus-cni-dir\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007371 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-multus-conf-dir\") pod \"multus-2pmr9\" 
(UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007391 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-var-lib-cni-multus\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007413 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b379cdd3-61d0-47bd-8d9c-4f7809bb75cb-proxy-tls\") pod \"machine-config-daemon-668zf\" (UID: \"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\") " pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007432 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tfvk\" (UniqueName: \"kubernetes.io/projected/17cdc54d-47d0-41b9-be99-f8293fa63ec6-kube-api-access-6tfvk\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007462 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/17cdc54d-47d0-41b9-be99-f8293fa63ec6-cnibin\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007482 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/17cdc54d-47d0-41b9-be99-f8293fa63ec6-os-release\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007484 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-var-lib-cni-multus\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007512 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-multus-socket-dir-parent\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007510 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-multus-conf-dir\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007531 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-etc-kubernetes\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " 
pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007558 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-etc-kubernetes\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007585 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b379cdd3-61d0-47bd-8d9c-4f7809bb75cb-rootfs\") pod \"machine-config-daemon-668zf\" (UID: \"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\") " pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007601 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/17cdc54d-47d0-41b9-be99-f8293fa63ec6-cnibin\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007606 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-system-cni-dir\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007624 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-run-netns\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007633 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b379cdd3-61d0-47bd-8d9c-4f7809bb75cb-rootfs\") pod \"machine-config-daemon-668zf\" (UID: \"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\") " pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007739 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-run-netns\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007742 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-multus-cni-dir\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007769 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e4186982-08f1-4809-be4f-25f86353ccf1-cni-binary-copy\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007786 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/17cdc54d-47d0-41b9-be99-f8293fa63ec6-cni-binary-copy\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007799 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-system-cni-dir\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007828 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-multus-socket-dir-parent\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007865 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-var-lib-cni-bin\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007841 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-var-lib-cni-bin\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007899 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/17cdc54d-47d0-41b9-be99-f8293fa63ec6-os-release\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007932 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-cnibin\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007962 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/17cdc54d-47d0-41b9-be99-f8293fa63ec6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.007988 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b379cdd3-61d0-47bd-8d9c-4f7809bb75cb-mcd-auth-proxy-config\") pod \"machine-config-daemon-668zf\" (UID: \"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\") " pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008009 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-var-lib-kubelet\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008035 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-run-multus-certs\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008074 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjh8r\" (UniqueName: \"kubernetes.io/projected/b379cdd3-61d0-47bd-8d9c-4f7809bb75cb-kube-api-access-cjh8r\") pod \"machine-config-daemon-668zf\" (UID: \"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\") " pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008098 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/17cdc54d-47d0-41b9-be99-f8293fa63ec6-system-cni-dir\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008117 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-hostroot\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008143 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-run-k8s-cni-cncf-io\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008162 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-os-release\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008182 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqvlm\" (UniqueName: \"kubernetes.io/projected/e4186982-08f1-4809-be4f-25f86353ccf1-kube-api-access-xqvlm\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008237 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-cnibin\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008281 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-var-lib-kubelet\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " 
pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008481 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-run-multus-certs\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008490 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e4186982-08f1-4809-be4f-25f86353ccf1-multus-daemon-config\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008505 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e4186982-08f1-4809-be4f-25f86353ccf1-cni-binary-copy\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008519 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-hostroot\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008519 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/17cdc54d-47d0-41b9-be99-f8293fa63ec6-cni-binary-copy\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008548 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-host-run-k8s-cni-cncf-io\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008551 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/17cdc54d-47d0-41b9-be99-f8293fa63ec6-system-cni-dir\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008605 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e4186982-08f1-4809-be4f-25f86353ccf1-os-release\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008797 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/17cdc54d-47d0-41b9-be99-f8293fa63ec6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.008925 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b379cdd3-61d0-47bd-8d9c-4f7809bb75cb-mcd-auth-proxy-config\") pod \"machine-config-daemon-668zf\" (UID: \"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\") " pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.009010 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/17cdc54d-47d0-41b9-be99-f8293fa63ec6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.012310 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b379cdd3-61d0-47bd-8d9c-4f7809bb75cb-proxy-tls\") pod \"machine-config-daemon-668zf\" (UID: \"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\") " pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.014022 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3"} Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.015827 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-kszvl" event={"ID":"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e","Type":"ContainerStarted","Data":"0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e"} Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.015851 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-kszvl" event={"ID":"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e","Type":"ContainerStarted","Data":"05222ab0ad472a2c801befaae2cee184cd04cfaaefd76f51a3177cde2474b097"} Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.017075 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-gcx27" event={"ID":"53acc368-19fd-4980-a438-1122e2b7c12e","Type":"ContainerStarted","Data":"c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f"} Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.017158 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-gcx27" event={"ID":"53acc368-19fd-4980-a438-1122e2b7c12e","Type":"ContainerStarted","Data":"2bc1dfd8091c9284487af41d76be3f6b6fe8afe1ba7dae85a418fa23dabe1e76"} Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.027116 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.033756 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjh8r\" (UniqueName: \"kubernetes.io/projected/b379cdd3-61d0-47bd-8d9c-4f7809bb75cb-kube-api-access-cjh8r\") pod \"machine-config-daemon-668zf\" (UID: \"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\") " pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.042316 
4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqvlm\" (UniqueName: \"kubernetes.io/projected/e4186982-08f1-4809-be4f-25f86353ccf1-kube-api-access-xqvlm\") pod \"multus-2pmr9\" (UID: \"e4186982-08f1-4809-be4f-25f86353ccf1\") " pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.050587 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tfvk\" (UniqueName: \"kubernetes.io/projected/17cdc54d-47d0-41b9-be99-f8293fa63ec6-kube-api-access-6tfvk\") pod \"multus-additional-cni-plugins-mxltm\" (UID: \"17cdc54d-47d0-41b9-be99-f8293fa63ec6\") " pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.064209 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.084375 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.089018 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-mxltm" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.096024 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-2pmr9" Sep 30 13:35:26 crc kubenswrapper[4783]: W0930 13:35:26.099325 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17cdc54d_47d0_41b9_be99_f8293fa63ec6.slice/crio-3afe1851229e23acb56dfe23497b44dd75ad8777aef3219e6d6774fa6021e6ac WatchSource:0}: Error finding container 3afe1851229e23acb56dfe23497b44dd75ad8777aef3219e6d6774fa6021e6ac: Status 404 returned error can't find the container with id 3afe1851229e23acb56dfe23497b44dd75ad8777aef3219e6d6774fa6021e6ac Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.102011 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.102255 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.118614 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.133814 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.145710 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-22xvs"] Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.146458 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.148182 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.148443 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.148587 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.148952 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.149022 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.149233 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.149309 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.154166 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.167734 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.182175 4783 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.204079 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.217035 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.232037 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.244927 4783 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.260521 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.272156 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.289799 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.302554 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312030 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-run-netns\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312063 4783 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-env-overrides\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312086 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-var-lib-openvswitch\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312109 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-log-socket\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312135 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovnkube-script-lib\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312254 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-openvswitch\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312316 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovnkube-config\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312397 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312426 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-systemd\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312462 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-systemd-units\") pod \"ovnkube-node-22xvs\" (UID: 
\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312485 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-node-log\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312514 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-cni-bin\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312546 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovn-node-metrics-cert\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312569 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-298pd\" (UniqueName: \"kubernetes.io/projected/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-kube-api-access-298pd\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312596 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-slash\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312651 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-run-ovn-kubernetes\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312677 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-kubelet\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312698 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-ovn\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312722 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-etc-openvswitch\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.312742 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-cni-netd\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.314153 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.323668 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.339830 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.359187 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.376671 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.413956 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-slash\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414001 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-run-ovn-kubernetes\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414027 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-kubelet\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414050 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-ovn\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414066 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-slash\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414125 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-ovn\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414105 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-etc-openvswitch\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414157 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-etc-openvswitch\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414157 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-cni-netd\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414162 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-run-ovn-kubernetes\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414191 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-kubelet\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414177 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-cni-netd\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414203 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-env-overrides\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414353 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-run-netns\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414452 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-run-netns\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414731 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-var-lib-openvswitch\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414792 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-log-socket\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414822 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovnkube-script-lib\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414849 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovnkube-config\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414878 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-openvswitch\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414937 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-openvswitch\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.414961 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.415033 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-systemd\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.415070 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-systemd-units\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.415092 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-node-log\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.415115 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-cni-bin\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.415126 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-systemd\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.415136 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovn-node-metrics-cert\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.415191 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-298pd\" (UniqueName: \"kubernetes.io/projected/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-kube-api-access-298pd\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.415200 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-var-lib-openvswitch\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.415366 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 
crc kubenswrapper[4783]: I0930 13:35:26.415399 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-node-log\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.415412 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-log-socket\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.415417 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-cni-bin\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.415534 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-systemd-units\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.416694 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovnkube-config\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.416722 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovnkube-script-lib\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.417810 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-env-overrides\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.419835 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovn-node-metrics-cert\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.438656 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-298pd\" (UniqueName: \"kubernetes.io/projected/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-kube-api-access-298pd\") pod \"ovnkube-node-22xvs\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.477614 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:26 crc kubenswrapper[4783]: W0930 13:35:26.488999 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7dab54f2_3ab6_480a_bfe8_8d8b17a7f81b.slice/crio-20c2ff68d53fc441954e6cce30c566ee27c77827f120df1f4864a64c949abad4 WatchSource:0}: Error finding container 20c2ff68d53fc441954e6cce30c566ee27c77827f120df1f4864a64c949abad4: Status 404 returned error can't find the container with id 20c2ff68d53fc441954e6cce30c566ee27c77827f120df1f4864a64c949abad4 Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.733052 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.737379 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.741278 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.747578 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.760061 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.771916 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.782925 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.795904 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.813813 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.826017 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.839291 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.853485 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.867754 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.878805 4783 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.889008 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.900638 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.912989 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.926588 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.936136 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.949565 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.967521 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.982679 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:26 crc kubenswrapper[4783]: I0930 13:35:26.995504 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:26Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.009243 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.019937 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.021851 4783 generic.go:334] "Generic (PLEG): container finished" podID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerID="86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967" exitCode=0 Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.021917 
4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerDied","Data":"86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967"} Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.021941 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerStarted","Data":"20c2ff68d53fc441954e6cce30c566ee27c77827f120df1f4864a64c949abad4"} Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.024548 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7"} Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.024587 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055"} Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.024597 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"8f20688effb1b9e8a4969474f6f3db256e71b747bcc79ae06bb824b4d6afb1b5"} Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.026560 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pmr9" event={"ID":"e4186982-08f1-4809-be4f-25f86353ccf1","Type":"ContainerStarted","Data":"db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044"} Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.026607 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pmr9" event={"ID":"e4186982-08f1-4809-be4f-25f86353ccf1","Type":"ContainerStarted","Data":"4d6a39eec59531d78b767616f91b93aac20c0866a4f5de911527461d27e3616b"} Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.028167 4783 generic.go:334] "Generic (PLEG): container finished" podID="17cdc54d-47d0-41b9-be99-f8293fa63ec6" containerID="39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f" exitCode=0 Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.028260 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" event={"ID":"17cdc54d-47d0-41b9-be99-f8293fa63ec6","Type":"ContainerDied","Data":"39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f"} Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.028306 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" event={"ID":"17cdc54d-47d0-41b9-be99-f8293fa63ec6","Type":"ContainerStarted","Data":"3afe1851229e23acb56dfe23497b44dd75ad8777aef3219e6d6774fa6021e6ac"} Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.036925 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.051488 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.062847 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.074214 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.117304 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered 
and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.154942 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.221171 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.238252 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc 
kubenswrapper[4783]: I0930 13:35:27.274167 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.314233 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.351327 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.406860 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.438133 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.473478 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.510359 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.554498 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.598545 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.630446 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.674963 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.709745 4783 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.712250 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.712286 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.712298 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.712409 4783 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.722071 4783 kubelet_node_status.go:115] "Node was previously registered" node="crc" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.722307 4783 kubelet_node_status.go:79] "Successfully registered node" node="crc" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.723626 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.723675 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.723687 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.723707 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.723720 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:27Z","lastTransitionTime":"2025-09-30T13:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:27 crc kubenswrapper[4783]: E0930 13:35:27.743145 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.746840 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.746889 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.746902 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.746920 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.746934 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:27Z","lastTransitionTime":"2025-09-30T13:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:27 crc kubenswrapper[4783]: E0930 13:35:27.758467 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.761573 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.761600 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.761609 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.761623 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.761634 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:27Z","lastTransitionTime":"2025-09-30T13:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:27 crc kubenswrapper[4783]: E0930 13:35:27.773102 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.776648 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.776672 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.776681 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.776695 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.776705 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:27Z","lastTransitionTime":"2025-09-30T13:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:27 crc kubenswrapper[4783]: E0930 13:35:27.788624 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.791519 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.791559 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.791574 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.791594 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.791607 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:27Z","lastTransitionTime":"2025-09-30T13:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:27 crc kubenswrapper[4783]: E0930 13:35:27.802843 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:27Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:27 crc kubenswrapper[4783]: E0930 13:35:27.802960 4783 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.805280 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.805321 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.805337 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.805356 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.805368 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:27Z","lastTransitionTime":"2025-09-30T13:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.843205 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:27 crc kubenswrapper[4783]: E0930 13:35:27.843392 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.843464 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.843464 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:27 crc kubenswrapper[4783]: E0930 13:35:27.843642 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:35:27 crc kubenswrapper[4783]: E0930 13:35:27.843944 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.908021 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.908066 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.908078 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.908097 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:27 crc kubenswrapper[4783]: I0930 13:35:27.908109 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:27Z","lastTransitionTime":"2025-09-30T13:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.011551 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.011602 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.011611 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.011638 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.011649 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:28Z","lastTransitionTime":"2025-09-30T13:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.033047 4783 generic.go:334] "Generic (PLEG): container finished" podID="17cdc54d-47d0-41b9-be99-f8293fa63ec6" containerID="c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c" exitCode=0 Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.033119 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" event={"ID":"17cdc54d-47d0-41b9-be99-f8293fa63ec6","Type":"ContainerDied","Data":"c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.036366 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerStarted","Data":"0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.036402 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerStarted","Data":"86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.036411 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerStarted","Data":"2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.036422 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerStarted","Data":"15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.036430 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerStarted","Data":"0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.049522 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:28Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.068629 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:28Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.085279 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:28Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.106294 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:28Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.113340 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.113372 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.113381 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.113397 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.113406 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:28Z","lastTransitionTime":"2025-09-30T13:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.122960 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:28Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.135433 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:28Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.147781 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:28Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.157564 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:28Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.168112 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:28Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.179493 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:28Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.193343 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:28Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.204091 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:28Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.215998 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.216036 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.216071 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.216087 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.216095 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:28Z","lastTransitionTime":"2025-09-30T13:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.240250 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-
30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:28Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.269750 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\"
:\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:28Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.319504 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.319815 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.319830 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.319849 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.319863 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:28Z","lastTransitionTime":"2025-09-30T13:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.422710 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.422732 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.422740 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.422752 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.422760 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:28Z","lastTransitionTime":"2025-09-30T13:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.524851 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.524907 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.524925 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.524948 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.524964 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:28Z","lastTransitionTime":"2025-09-30T13:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.548324 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.549122 4783 scope.go:117] "RemoveContainer" containerID="56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e" Sep 30 13:35:28 crc kubenswrapper[4783]: E0930 13:35:28.549444 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.628382 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.628455 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.628480 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.628510 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.628533 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:28Z","lastTransitionTime":"2025-09-30T13:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.731471 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.731542 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.731568 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.731599 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.731622 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:28Z","lastTransitionTime":"2025-09-30T13:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.835057 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.835138 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.835165 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.835194 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.835216 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:28Z","lastTransitionTime":"2025-09-30T13:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.937534 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.937576 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.937587 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.937603 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:28 crc kubenswrapper[4783]: I0930 13:35:28.937615 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:28Z","lastTransitionTime":"2025-09-30T13:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.039890 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.039924 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.039935 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.039950 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.039961 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:29Z","lastTransitionTime":"2025-09-30T13:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.044064 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerStarted","Data":"9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a"} Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.047036 4783 generic.go:334] "Generic (PLEG): container finished" podID="17cdc54d-47d0-41b9-be99-f8293fa63ec6" containerID="de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2" exitCode=0 Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.047105 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" event={"ID":"17cdc54d-47d0-41b9-be99-f8293fa63ec6","Type":"ContainerDied","Data":"de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2"} Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.069527 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.088553 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.112485 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging 
kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-li
b\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\
\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.131209 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.143371 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.143446 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.143463 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.143483 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.143496 4783 setters.go:603] "Node 
became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:29Z","lastTransitionTime":"2025-09-30T13:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.149118 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.165692 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.178775 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.196290 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.212141 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.223936 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.237662 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.248023 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.248066 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.248078 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.248094 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.248105 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:29Z","lastTransitionTime":"2025-09-30T13:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.251063 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.264624 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",
\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.281324 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.350374 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.350429 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.350440 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.350466 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.350480 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:29Z","lastTransitionTime":"2025-09-30T13:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.452686 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.452754 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.452769 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.452796 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.452812 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:29Z","lastTransitionTime":"2025-09-30T13:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.546548 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.546728 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.546759 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:35:37.546728101 +0000 UTC m=+37.478194408 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.546820 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.546937 4783 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.546984 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:37.546973679 +0000 UTC m=+37.478440066 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.547016 4783 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.547099 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:37.547074951 +0000 UTC m=+37.478541288 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.556471 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.556519 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.556531 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.556548 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.556558 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:29Z","lastTransitionTime":"2025-09-30T13:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.648066 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.648444 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.648555 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.648578 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.648590 4783 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.648641 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:37.648623551 +0000 UTC m=+37.580089878 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.648805 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.648910 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.649001 4783 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.649123 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:37.649111007 +0000 UTC m=+37.580577314 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.659636 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.659693 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.659712 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.659736 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.659754 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:29Z","lastTransitionTime":"2025-09-30T13:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.763078 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.763126 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.763139 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.763155 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.763166 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:29Z","lastTransitionTime":"2025-09-30T13:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.843150 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.843246 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.843269 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.843377 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.843475 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:35:29 crc kubenswrapper[4783]: E0930 13:35:29.843615 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.866350 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.866703 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.866856 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.867062 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.867251 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:29Z","lastTransitionTime":"2025-09-30T13:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.971032 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.971092 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.971104 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.971123 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:29 crc kubenswrapper[4783]: I0930 13:35:29.971134 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:29Z","lastTransitionTime":"2025-09-30T13:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.055526 4783 generic.go:334] "Generic (PLEG): container finished" podID="17cdc54d-47d0-41b9-be99-f8293fa63ec6" containerID="1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7" exitCode=0
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.055593 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" event={"ID":"17cdc54d-47d0-41b9-be99-f8293fa63ec6","Type":"ContainerDied","Data":"1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7"}
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.073976 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.074264 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.074369 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.074466 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.074587 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:30Z","lastTransitionTime":"2025-09-30T13:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.083161 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.100533 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.113995 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.127405 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.141507 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-relea
se\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":
{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.158376 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.173856 4783 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.177613 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.177681 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.177719 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.177738 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.177750 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:30Z","lastTransitionTime":"2025-09-30T13:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.191379 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.209305 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.226974 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.239830 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.257101 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.269152 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.279703 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.279732 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.279739 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.279751 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.279760 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:30Z","lastTransitionTime":"2025-09-30T13:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.288818 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z 
is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.381990 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.382035 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.382044 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.382056 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.382066 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:30Z","lastTransitionTime":"2025-09-30T13:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.485528 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.485604 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.485620 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.485642 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.485657 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:30Z","lastTransitionTime":"2025-09-30T13:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.588273 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.588310 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.588320 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.588333 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.588341 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:30Z","lastTransitionTime":"2025-09-30T13:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.691409 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.691436 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.691444 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.691456 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.691464 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:30Z","lastTransitionTime":"2025-09-30T13:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.794634 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.794668 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.794676 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.794688 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.794698 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:30Z","lastTransitionTime":"2025-09-30T13:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.864058 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.886812 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.897490 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.897552 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.897571 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.897598 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.897634 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:30Z","lastTransitionTime":"2025-09-30T13:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.911003 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:
35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.925460 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.949177 4783 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.962092 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:30 crc kubenswrapper[4783]: I0930 13:35:30.983662 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.000679 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.000742 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.000760 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.000789 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.000808 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:31Z","lastTransitionTime":"2025-09-30T13:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.015153 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z 
is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.066038 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerStarted","Data":"496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb"} Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.068987 4783 generic.go:334] "Generic (PLEG): container finished" podID="17cdc54d-47d0-41b9-be99-f8293fa63ec6" containerID="5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794" exitCode=0 Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.069024 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" event={"ID":"17cdc54d-47d0-41b9-be99-f8293fa63ec6","Type":"ContainerDied","Data":"5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794"} Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.085876 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.102768 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.102839 4783 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.102852 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.102869 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.102906 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:31Z","lastTransitionTime":"2025-09-30T13:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.102974 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-
config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.114149 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.123286 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.140701 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.155210 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.167466 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.175577 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.187154 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.197870 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.205106 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.205143 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.205152 4783 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.205175 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.205192 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:31Z","lastTransitionTime":"2025-09-30T13:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.212279 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.222499 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.236441 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.252853 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z 
is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.266918 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.283053 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.296218 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.307155 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.307342 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.307372 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.307383 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.307400 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.307412 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:31Z","lastTransitionTime":"2025-09-30T13:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.324053 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.335804 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.409469 4783 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.409572 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.409597 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.409625 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.409649 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:31Z","lastTransitionTime":"2025-09-30T13:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.512710 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.512756 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.512770 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.512786 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.512798 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:31Z","lastTransitionTime":"2025-09-30T13:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.615170 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.615218 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.615265 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.615284 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.615299 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:31Z","lastTransitionTime":"2025-09-30T13:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.717380 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.717464 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.717487 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.717512 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.717530 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:31Z","lastTransitionTime":"2025-09-30T13:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.820878 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.820943 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.820960 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.820985 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.821002 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:31Z","lastTransitionTime":"2025-09-30T13:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.842328 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.842374 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:31 crc kubenswrapper[4783]: E0930 13:35:31.842506 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.842525 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:31 crc kubenswrapper[4783]: E0930 13:35:31.842673 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:35:31 crc kubenswrapper[4783]: E0930 13:35:31.842823 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.923789 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.923843 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.923859 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.923881 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:31 crc kubenswrapper[4783]: I0930 13:35:31.923898 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:31Z","lastTransitionTime":"2025-09-30T13:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.026375 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.026435 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.026458 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.026486 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.026510 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:32Z","lastTransitionTime":"2025-09-30T13:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.078143 4783 generic.go:334] "Generic (PLEG): container finished" podID="17cdc54d-47d0-41b9-be99-f8293fa63ec6" containerID="9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7" exitCode=0 Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.078204 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" event={"ID":"17cdc54d-47d0-41b9-be99-f8293fa63ec6","Type":"ContainerDied","Data":"9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7"} Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.095199 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:32Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.117607 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:32Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.129516 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.129547 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.129559 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.129576 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.129590 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:32Z","lastTransitionTime":"2025-09-30T13:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.148339 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:32Z 
is after 2025-08-24T17:21:41Z" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.162705 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:32Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.175791 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:32Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.189443 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:32Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.210365 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:32Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.224484 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:32Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.232260 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.232295 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.232328 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.232344 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.232355 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:32Z","lastTransitionTime":"2025-09-30T13:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.237755 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:32Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.250404 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:32Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.261174 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:32Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.276921 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:32Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.290579 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:32Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.300284 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:32Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.336371 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.336457 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.336477 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.336498 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.336551 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:32Z","lastTransitionTime":"2025-09-30T13:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.439096 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.439642 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.439748 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.439848 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.440077 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:32Z","lastTransitionTime":"2025-09-30T13:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.543328 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.543647 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.543666 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.543690 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.543708 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:32Z","lastTransitionTime":"2025-09-30T13:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.646519 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.646554 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.646563 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.646577 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.646587 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:32Z","lastTransitionTime":"2025-09-30T13:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.750110 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.750191 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.750214 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.750283 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.750309 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:32Z","lastTransitionTime":"2025-09-30T13:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.853483 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.853545 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.853567 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.853596 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.853618 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:32Z","lastTransitionTime":"2025-09-30T13:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.956501 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.956560 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.956570 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.956584 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:32 crc kubenswrapper[4783]: I0930 13:35:32.956594 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:32Z","lastTransitionTime":"2025-09-30T13:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.060409 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.060482 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.060506 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.060536 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.060559 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:33Z","lastTransitionTime":"2025-09-30T13:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.085426 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" event={"ID":"17cdc54d-47d0-41b9-be99-f8293fa63ec6","Type":"ContainerStarted","Data":"207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593"} Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.096031 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerStarted","Data":"0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7"} Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.097527 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.097639 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.106454 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.123171 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.139978 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.156186 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.171318 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.181903 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.181935 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.181944 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.181957 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.181994 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:33Z","lastTransitionTime":"2025-09-30T13:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.185104 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.186819 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.187021 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.203841 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.232454 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.244967 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.261660 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.274599 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.284164 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.284205 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.284214 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.284247 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.284258 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:33Z","lastTransitionTime":"2025-09-30T13:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.290375 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.311456 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.328737 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.345822 4783 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.361587 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.379265 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.386983 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.387024 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:33 crc 
kubenswrapper[4783]: I0930 13:35:33.387038 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.387058 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.387074 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:33Z","lastTransitionTime":"2025-09-30T13:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.391578 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.411461 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.421397 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.435768 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.459305 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07405
37f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.479354 4783 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.489952 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.490013 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.490029 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.490051 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.490069 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:33Z","lastTransitionTime":"2025-09-30T13:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.501516 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.528615 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.541872 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.562200 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.576996 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:33Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.593166 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.593213 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.593278 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.593308 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.593330 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:33Z","lastTransitionTime":"2025-09-30T13:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.696456 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.696520 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.696538 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.696562 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.696581 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:33Z","lastTransitionTime":"2025-09-30T13:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.799900 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.800024 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.800045 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.800069 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.800086 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:33Z","lastTransitionTime":"2025-09-30T13:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.842697 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.842774 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.842715 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:33 crc kubenswrapper[4783]: E0930 13:35:33.842855 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:35:33 crc kubenswrapper[4783]: E0930 13:35:33.842962 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:35:33 crc kubenswrapper[4783]: E0930 13:35:33.843180 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.903560 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.903616 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.903634 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.903660 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:33 crc kubenswrapper[4783]: I0930 13:35:33.903679 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:33Z","lastTransitionTime":"2025-09-30T13:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.007200 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.007294 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.007310 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.007370 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.007392 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:34Z","lastTransitionTime":"2025-09-30T13:35:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.099921 4783 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.109881 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.109921 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.109937 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.109962 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.109979 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:34Z","lastTransitionTime":"2025-09-30T13:35:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.213265 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.213355 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.213369 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.213387 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.213420 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:34Z","lastTransitionTime":"2025-09-30T13:35:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.316673 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.316713 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.316722 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.316736 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:34 crc kubenswrapper[4783]: I0930 13:35:34.316748 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:34Z","lastTransitionTime":"2025-09-30T13:35:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... identical "Recording event message for node" / "Node became not ready" cycles repeated roughly every 100 ms from 13:35:34.420280 through 13:35:35.042195 elided; only the timestamps differ ...]
Sep 30 13:35:35 crc kubenswrapper[4783]: I0930 13:35:35.104370 4783 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
[... identical node-status cycles from 13:35:35.144390 through 13:35:35.764292 elided; only the timestamps differ ...]
Sep 30 13:35:35 crc kubenswrapper[4783]: I0930 13:35:35.842740 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:35:35 crc kubenswrapper[4783]: I0930 13:35:35.842974 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:35:35 crc kubenswrapper[4783]: I0930 13:35:35.842951 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:35:35 crc kubenswrapper[4783]: E0930 13:35:35.843089 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 13:35:35 crc kubenswrapper[4783]: E0930 13:35:35.843213 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:35:35 crc kubenswrapper[4783]: E0930 13:35:35.843396 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[... identical node-status cycles from 13:35:35.866508 through 13:35:36.999591 elided; only the timestamps differ ...]
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.102699 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.102756 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.102776 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.102801 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.102817 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:37Z","lastTransitionTime":"2025-09-30T13:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.114538 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/0.log" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.119120 4783 generic.go:334] "Generic (PLEG): container finished" podID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerID="0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7" exitCode=1 Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.119193 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerDied","Data":"0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7"} Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.120822 4783 scope.go:117] "RemoveContainer" containerID="0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.147561 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:37Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.172353 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:37Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.187815 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:37Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.203673 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:37Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.204990 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.205032 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.205046 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.205061 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.205069 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:37Z","lastTransitionTime":"2025-09-30T13:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.222383 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:37Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.251564 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/r
un/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:36Z\\\",\\\"message\\\":\\\"ent handler 8 for removal\\\\nI0930 13:35:36.436401 6088 factory.go:656] Stopping watch factory\\\\nI0930 13:35:36.436405 6088 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:36.436415 6088 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:36.436439 6088 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:36.436319 6088 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436528 6088 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436453 6088 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436764 6088 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 13:35:36.437101 6088 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs
\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:37Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.272104 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:37Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.291886 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:37Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.307767 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.307848 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.307899 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.307931 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.307954 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:37Z","lastTransitionTime":"2025-09-30T13:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.313844 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:37Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.332281 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:37Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.351165 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:37Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.369645 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:37Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.391631 4783 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:37Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.409857 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:37Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.410483 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.410529 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.410540 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.410557 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.410570 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:37Z","lastTransitionTime":"2025-09-30T13:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.513535 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.513578 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.513591 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.513607 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.513618 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:37Z","lastTransitionTime":"2025-09-30T13:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.617031 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.617084 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.617104 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.617127 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.617145 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:37Z","lastTransitionTime":"2025-09-30T13:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.634186 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.634422 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:35:53.634391484 +0000 UTC m=+53.565857831 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.634505 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.634578 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.634715 4783 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.634762 4783 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.634820 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:53.634794067 +0000 UTC m=+53.566260404 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.634850 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:53.634837419 +0000 UTC m=+53.566303756 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.720727 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.720790 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.720808 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.720837 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.720858 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:37Z","lastTransitionTime":"2025-09-30T13:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.735303 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.735366 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.735544 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.735586 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.735602 4783 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.735544 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.735693 4783 projected.go:288] Couldn't get configMap 
openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.735708 4783 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.735669 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:53.735649255 +0000 UTC m=+53.667115572 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.735808 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:53.735760198 +0000 UTC m=+53.667226555 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.823428 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.823492 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.823510 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.823534 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.823552 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:37Z","lastTransitionTime":"2025-09-30T13:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.843057 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.843133 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.843171 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.843569 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.843430 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:35:37 crc kubenswrapper[4783]: E0930 13:35:37.843715 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.925409 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.925480 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.925504 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.925534 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:37 crc kubenswrapper[4783]: I0930 13:35:37.925560 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:37Z","lastTransitionTime":"2025-09-30T13:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.027256 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.027333 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.027355 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.027384 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.027405 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:38Z","lastTransitionTime":"2025-09-30T13:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:38 crc kubenswrapper[4783]: E0930 13:35:38.047678 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:38Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.053315 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.053373 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.053391 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.053414 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.053432 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:38Z","lastTransitionTime":"2025-09-30T13:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:38 crc kubenswrapper[4783]: E0930 13:35:38.073461 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:38Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.078864 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.078926 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.078938 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.078955 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.078968 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:38Z","lastTransitionTime":"2025-09-30T13:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:38 crc kubenswrapper[4783]: E0930 13:35:38.093585 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:38Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.097751 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.097785 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.097795 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.097811 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.097822 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:38Z","lastTransitionTime":"2025-09-30T13:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:38 crc kubenswrapper[4783]: E0930 13:35:38.110820 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:38Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.115547 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.115579 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.115589 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.115605 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.115617 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:38Z","lastTransitionTime":"2025-09-30T13:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.124182 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/0.log" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.126855 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerStarted","Data":"36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2"} Sep 30 13:35:38 crc kubenswrapper[4783]: E0930 13:35:38.130199 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:38Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:38 crc kubenswrapper[4783]: E0930 13:35:38.130397 4783 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.131901 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.131934 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.131948 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.131966 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.131981 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:38Z","lastTransitionTime":"2025-09-30T13:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.233986 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.234021 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.234032 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.234047 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.234059 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:38Z","lastTransitionTime":"2025-09-30T13:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.337180 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.337257 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.337274 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.337296 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.337313 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:38Z","lastTransitionTime":"2025-09-30T13:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.440823 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.440884 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.440920 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.440951 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.440977 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:38Z","lastTransitionTime":"2025-09-30T13:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.543502 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.543578 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.543600 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.543626 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.543646 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:38Z","lastTransitionTime":"2025-09-30T13:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.646091 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.646131 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.646142 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.646173 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.646189 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:38Z","lastTransitionTime":"2025-09-30T13:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.772464 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.772533 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.772560 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.772592 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.772614 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:38Z","lastTransitionTime":"2025-09-30T13:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.875399 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.875443 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.875454 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.875471 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.875482 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:38Z","lastTransitionTime":"2025-09-30T13:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.978717 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.978815 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.978839 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.978863 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:38 crc kubenswrapper[4783]: I0930 13:35:38.978880 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:38Z","lastTransitionTime":"2025-09-30T13:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.027204 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl"] Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.027999 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.030760 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.032083 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.048634 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d144a9a2-3cd9-4ca7-9a18-631efc4ddea2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dq9kl\" (UID: \"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.048676 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gm2hm\" (UniqueName: \"kubernetes.io/projected/d144a9a2-3cd9-4ca7-9a18-631efc4ddea2-kube-api-access-gm2hm\") pod \"ovnkube-control-plane-749d76644c-dq9kl\" (UID: \"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.048713 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d144a9a2-3cd9-4ca7-9a18-631efc4ddea2-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dq9kl\" (UID: \"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.048735 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d144a9a2-3cd9-4ca7-9a18-631efc4ddea2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dq9kl\" (UID: \"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.049321 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.063588 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.076013 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.081113 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.081176 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.081198 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.081331 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.081358 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:39Z","lastTransitionTime":"2025-09-30T13:35:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.090481 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.105442 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.118147 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.129258 4783 prober_manager.go:312] "Failed to 
trigger a manual run" probe="Readiness" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.132710 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"resta
rtCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.145198 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.149157 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d144a9a2-3cd9-4ca7-9a18-631efc4ddea2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dq9kl\" (UID: \"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.149237 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gm2hm\" (UniqueName: \"kubernetes.io/projected/d144a9a2-3cd9-4ca7-9a18-631efc4ddea2-kube-api-access-gm2hm\") pod \"ovnkube-control-plane-749d76644c-dq9kl\" (UID: \"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.149277 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d144a9a2-3cd9-4ca7-9a18-631efc4ddea2-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dq9kl\" (UID: \"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.149347 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d144a9a2-3cd9-4ca7-9a18-631efc4ddea2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dq9kl\" (UID: \"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.151094 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d144a9a2-3cd9-4ca7-9a18-631efc4ddea2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dq9kl\" (UID: \"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.151300 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d144a9a2-3cd9-4ca7-9a18-631efc4ddea2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dq9kl\" (UID: \"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.159905 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d144a9a2-3cd9-4ca7-9a18-631efc4ddea2-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dq9kl\" (UID: \"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.161137 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.172747 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gm2hm\" (UniqueName: \"kubernetes.io/projected/d144a9a2-3cd9-4ca7-9a18-631efc4ddea2-kube-api-access-gm2hm\") pod \"ovnkube-control-plane-749d76644c-dq9kl\" (UID: \"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.184076 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad14312ab0209c838067eeadd64f9cd91c63709
4f773fa51218180845f09fc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:36Z\\\",\\\"message\\\":\\\"ent handler 8 for removal\\\\nI0930 13:35:36.436401 6088 factory.go:656] Stopping watch factory\\\\nI0930 13:35:36.436405 6088 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:36.436415 6088 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:36.436439 6088 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:36.436319 6088 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436528 6088 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436453 6088 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436764 6088 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 13:35:36.437101 6088 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.184486 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.184530 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.184543 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.184560 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.184571 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:39Z","lastTransitionTime":"2025-09-30T13:35:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.196974 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.210505 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.246056 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.265570 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.284419 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.286644 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.286760 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.286856 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.286941 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.287016 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:39Z","lastTransitionTime":"2025-09-30T13:35:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.295026 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.307176 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.318918 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.330629 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.339233 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.343856 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.352587 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\
\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.377796 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11
d02bfd1e52c18a15a988f1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:36Z\\\",\\\"message\\\":\\\"ent handler 8 for removal\\\\nI0930 13:35:36.436401 6088 factory.go:656] Stopping watch factory\\\\nI0930 13:35:36.436405 6088 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:36.436415 6088 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:36.436439 6088 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:36.436319 6088 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436528 6088 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436453 6088 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436764 6088 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 13:35:36.437101 6088 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.390817 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.390888 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.390900 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.390927 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.390964 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:39Z","lastTransitionTime":"2025-09-30T13:35:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.392266 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.404142 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.415934 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.430867 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.441655 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.454503 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.464177 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.476710 4783 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:39Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.493678 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.493727 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.493739 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.493756 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.493769 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:39Z","lastTransitionTime":"2025-09-30T13:35:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.596027 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.596079 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.596093 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.596112 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.596124 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:39Z","lastTransitionTime":"2025-09-30T13:35:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.699786 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.699900 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.699922 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.699949 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.699969 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:39Z","lastTransitionTime":"2025-09-30T13:35:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.803519 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.803570 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.803582 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.803598 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.803612 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:39Z","lastTransitionTime":"2025-09-30T13:35:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.842778 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.842862 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.842872 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:39 crc kubenswrapper[4783]: E0930 13:35:39.842938 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:35:39 crc kubenswrapper[4783]: E0930 13:35:39.842996 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:35:39 crc kubenswrapper[4783]: E0930 13:35:39.843038 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.906828 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.906873 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.906882 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.906896 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:39 crc kubenswrapper[4783]: I0930 13:35:39.906906 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:39Z","lastTransitionTime":"2025-09-30T13:35:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.009866 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.009910 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.009923 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.009939 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.009951 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:40Z","lastTransitionTime":"2025-09-30T13:35:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.112030 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.112083 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.112100 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.112121 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.112138 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:40Z","lastTransitionTime":"2025-09-30T13:35:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.130332 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-k69sq"] Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.131185 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:40 crc kubenswrapper[4783]: E0930 13:35:40.131347 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.135346 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" event={"ID":"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2","Type":"ContainerStarted","Data":"5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.135404 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" event={"ID":"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2","Type":"ContainerStarted","Data":"ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.135427 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" event={"ID":"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2","Type":"ContainerStarted","Data":"91af9d58799cf299787b7325bd1d6216ff662e89f53a04607b614e1502454023"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.137973 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/1.log" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.139012 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/0.log" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.143649 4783 generic.go:334] "Generic (PLEG): container finished" podID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerID="36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2" exitCode=1 Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.143705 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerDied","Data":"36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.143779 4783 scope.go:117] "RemoveContainer" containerID="0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.144603 4783 scope.go:117] "RemoveContainer" containerID="36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2" Sep 30 13:35:40 crc kubenswrapper[4783]: E0930 13:35:40.144814 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.155733 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.159385 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxtnb\" (UniqueName: \"kubernetes.io/projected/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-kube-api-access-hxtnb\") pod \"network-metrics-daemon-k69sq\" (UID: \"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\") " pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.159691 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs\") pod \"network-metrics-daemon-k69sq\" (UID: \"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\") " pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.174662 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.191987 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.205876 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.215352 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.215389 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.215401 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.215419 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.215432 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:40Z","lastTransitionTime":"2025-09-30T13:35:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.219880 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a0170
86d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.231836 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.245917 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.259577 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.260403 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs\") pod \"network-metrics-daemon-k69sq\" (UID: \"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\") " pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.260520 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxtnb\" (UniqueName: \"kubernetes.io/projected/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-kube-api-access-hxtnb\") pod \"network-metrics-daemon-k69sq\" (UID: \"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\") " pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:40 crc kubenswrapper[4783]: E0930 13:35:40.260681 4783 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 
13:35:40 crc kubenswrapper[4783]: E0930 13:35:40.260755 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs podName:700fd549-bf4a-4e30-9e2c-efdb039a7ac4 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:40.760730836 +0000 UTC m=+40.692197153 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs") pod "network-metrics-daemon-k69sq" (UID: "700fd549-bf4a-4e30-9e2c-efdb039a7ac4") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.273154 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for 
pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.280685 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxtnb\" (UniqueName: \"kubernetes.io/projected/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-kube-api-access-hxtnb\") pod \"network-metrics-daemon-k69sq\" (UID: \"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\") " pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.291557 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.308574 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.318666 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.318732 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.318752 4783 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.318779 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.318797 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:40Z","lastTransitionTime":"2025-09-30T13:35:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.324363 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.336861 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.353121 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.373920 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11
d02bfd1e52c18a15a988f1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:36Z\\\",\\\"message\\\":\\\"ent handler 8 for removal\\\\nI0930 13:35:36.436401 6088 factory.go:656] Stopping watch factory\\\\nI0930 13:35:36.436405 6088 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:36.436415 6088 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:36.436439 6088 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:36.436319 6088 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436528 6088 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436453 6088 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436764 6088 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 13:35:36.437101 6088 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.389072 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.405012 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.422787 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.422835 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.422858 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.422884 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.422902 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:40Z","lastTransitionTime":"2025-09-30T13:35:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.430969 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.451692 4783 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 
13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.470725 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.485081 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.496711 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.513479 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.526314 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.526380 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.526398 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.526422 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.526413 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.526443 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:40Z","lastTransitionTime":"2025-09-30T13:35:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.539594 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.547863 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.562437 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.587067 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07405
37f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:36Z\\\",\\\"message\\\":\\\"ent handler 8 for removal\\\\nI0930 13:35:36.436401 6088 factory.go:656] Stopping watch factory\\\\nI0930 13:35:36.436405 6088 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:36.436415 6088 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:36.436439 6088 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:36.436319 6088 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436528 6088 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436453 6088 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436764 6088 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 13:35:36.437101 6088 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"message\\\":\\\"-24T17:21:41Z]\\\\nI0930 13:35:39.118387 6230 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0930 13:35:39.118379 6230 services_controller.go:434] Service openshift-cluster-version/cluster-version-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{cluster-version-operator openshift-cluster-version ddf4933a-f532-4906-9b8f-3b15aa433264 6187 0 2025-02-23 05:11:57 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:cluster-version-operator] map[exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true kubernetes.io/description:Expose cluster-version operator metrics to other in-cluster consumers. Access requires a prometheus-k8s RoleBinding in this namespace. service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:cluster-version-operator-serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e327 \\\\u003cnil\\\\u003e}] [] 
[]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Nam\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.598209 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acces
s-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.611677 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.629509 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.630008 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.630044 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.630056 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.630074 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.630087 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:40Z","lastTransitionTime":"2025-09-30T13:35:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.652954 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.733252 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.733912 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.733990 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.734027 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.734056 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:40Z","lastTransitionTime":"2025-09-30T13:35:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.767481 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs\") pod \"network-metrics-daemon-k69sq\" (UID: \"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\") " pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:40 crc kubenswrapper[4783]: E0930 13:35:40.767664 4783 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 13:35:40 crc kubenswrapper[4783]: E0930 13:35:40.767747 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs podName:700fd549-bf4a-4e30-9e2c-efdb039a7ac4 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:41.76772448 +0000 UTC m=+41.699190817 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs") pod "network-metrics-daemon-k69sq" (UID: "700fd549-bf4a-4e30-9e2c-efdb039a7ac4") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.836941 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.836997 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.837015 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.837033 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.837048 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:40Z","lastTransitionTime":"2025-09-30T13:35:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.868334 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.886868 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.903707 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.929919 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11
d02bfd1e52c18a15a988f1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:36Z\\\",\\\"message\\\":\\\"ent handler 8 for removal\\\\nI0930 13:35:36.436401 6088 factory.go:656] Stopping watch factory\\\\nI0930 13:35:36.436405 6088 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:36.436415 6088 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:36.436439 6088 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:36.436319 6088 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436528 6088 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436453 6088 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436764 6088 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 13:35:36.437101 6088 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"message\\\":\\\"-24T17:21:41Z]\\\\nI0930 13:35:39.118387 6230 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0930 13:35:39.118379 6230 services_controller.go:434] Service openshift-cluster-version/cluster-version-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{cluster-version-operator openshift-cluster-version ddf4933a-f532-4906-9b8f-3b15aa433264 6187 0 2025-02-23 05:11:57 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:cluster-version-operator] map[exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true kubernetes.io/description:Expose cluster-version operator metrics to other in-cluster consumers. Access requires a prometheus-k8s RoleBinding in this namespace. 
service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:cluster-version-operator-serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e327 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Nam\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.939160 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.939205 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.939235 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.939253 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.939265 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:40Z","lastTransitionTime":"2025-09-30T13:35:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.949431 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.967622 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:40 crc kubenswrapper[4783]: I0930 13:35:40.984593 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.001435 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:40Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.014043 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:41Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.031351 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:41Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.041279 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.041312 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.041323 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.041339 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.041359 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:41Z","lastTransitionTime":"2025-09-30T13:35:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.049621 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:41Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.063490 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:41Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.076296 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:41Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.087782 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:41Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.102963 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:41Z is after 
2025-08-24T17:21:41Z" Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.114830 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:41Z is after 2025-08-24T17:21:41Z"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.144047 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.144098 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.144109 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.144126 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.144139 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:41Z","lastTransitionTime":"2025-09-30T13:35:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.147839 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/1.log"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.247435 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.247497 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.247525 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.247556 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.247578 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:41Z","lastTransitionTime":"2025-09-30T13:35:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.350463 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.350532 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.350552 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.350579 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.350600 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:41Z","lastTransitionTime":"2025-09-30T13:35:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.454314 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.454381 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.454399 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.454427 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.454445 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:41Z","lastTransitionTime":"2025-09-30T13:35:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.557426 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.557504 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.557530 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.557559 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.557582 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:41Z","lastTransitionTime":"2025-09-30T13:35:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.662039 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.662390 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.662536 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.662738 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.662883 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:41Z","lastTransitionTime":"2025-09-30T13:35:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.765720 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.766399 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.766477 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.766543 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.766603 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:41Z","lastTransitionTime":"2025-09-30T13:35:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.776402 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs\") pod \"network-metrics-daemon-k69sq\" (UID: \"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\") " pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:35:41 crc kubenswrapper[4783]: E0930 13:35:41.776540 4783 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 13:35:41 crc kubenswrapper[4783]: E0930 13:35:41.776684 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs podName:700fd549-bf4a-4e30-9e2c-efdb039a7ac4 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:43.776636284 +0000 UTC m=+43.708102621 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs") pod "network-metrics-daemon-k69sq" (UID: "700fd549-bf4a-4e30-9e2c-efdb039a7ac4") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.842289 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.842412 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.842423 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.842492 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:35:41 crc kubenswrapper[4783]: E0930 13:35:41.842649 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4"
Sep 30 13:35:41 crc kubenswrapper[4783]: E0930 13:35:41.842857 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:35:41 crc kubenswrapper[4783]: E0930 13:35:41.843103 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 13:35:41 crc kubenswrapper[4783]: E0930 13:35:41.843205 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.870265 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.870305 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.870321 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.870345 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.870362 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:41Z","lastTransitionTime":"2025-09-30T13:35:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.973564 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.973905 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.974085 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.974352 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:41 crc kubenswrapper[4783]: I0930 13:35:41.974493 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:41Z","lastTransitionTime":"2025-09-30T13:35:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.078180 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.078287 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.078307 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.078333 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.078352 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:42Z","lastTransitionTime":"2025-09-30T13:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.181455 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.181526 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.181548 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.181579 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.181600 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:42Z","lastTransitionTime":"2025-09-30T13:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.284607 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.284672 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.284692 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.284715 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.284734 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:42Z","lastTransitionTime":"2025-09-30T13:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.387722 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.387772 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.387788 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.387811 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.387833 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:42Z","lastTransitionTime":"2025-09-30T13:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.489842 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.489921 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.489934 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.489951 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.489963 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:42Z","lastTransitionTime":"2025-09-30T13:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.592949 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.593014 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.593031 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.593056 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.593074 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:42Z","lastTransitionTime":"2025-09-30T13:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.696187 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.696236 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.696244 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.696259 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.696268 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:42Z","lastTransitionTime":"2025-09-30T13:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.798540 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.798603 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.798620 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.798647 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.798666 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:42Z","lastTransitionTime":"2025-09-30T13:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.843783 4783 scope.go:117] "RemoveContainer" containerID="56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.903729 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.903771 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.903782 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.903797 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:42 crc kubenswrapper[4783]: I0930 13:35:42.903807 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:42Z","lastTransitionTime":"2025-09-30T13:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.006975 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.007011 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.007023 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.007041 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.007052 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:43Z","lastTransitionTime":"2025-09-30T13:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.109781 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.109826 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.109841 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.109858 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.109869 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:43Z","lastTransitionTime":"2025-09-30T13:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.162407 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.164814 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9"} Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.165131 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.184214 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.197661 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.213473 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.213561 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.213580 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.213652 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.213671 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:43Z","lastTransitionTime":"2025-09-30T13:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.213903 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a0170
86d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.231123 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.254995 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.271337 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.289854 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.307591 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.317174 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.317216 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.317246 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.317264 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.317276 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:43Z","lastTransitionTime":"2025-09-30T13:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.326612 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.340011 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.356088 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.374353 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.403982 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07405
37f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:36Z\\\",\\\"message\\\":\\\"ent handler 8 for removal\\\\nI0930 13:35:36.436401 6088 factory.go:656] Stopping watch factory\\\\nI0930 13:35:36.436405 6088 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:36.436415 6088 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:36.436439 6088 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:36.436319 6088 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436528 6088 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436453 6088 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436764 6088 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 13:35:36.437101 6088 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"message\\\":\\\"-24T17:21:41Z]\\\\nI0930 13:35:39.118387 6230 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0930 13:35:39.118379 6230 services_controller.go:434] Service openshift-cluster-version/cluster-version-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{cluster-version-operator openshift-cluster-version ddf4933a-f532-4906-9b8f-3b15aa433264 6187 0 2025-02-23 05:11:57 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:cluster-version-operator] map[exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true kubernetes.io/description:Expose cluster-version operator metrics to other in-cluster consumers. Access requires a prometheus-k8s RoleBinding in this namespace. service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:cluster-version-operator-serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e327 \\\\u003cnil\\\\u003e}] [] 
[]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Nam\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.419161 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.419246 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.419260 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.419279 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.419297 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:43Z","lastTransitionTime":"2025-09-30T13:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.419540 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.437667 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.452150 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:43Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.522186 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.522295 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.522314 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.522338 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.522357 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:43Z","lastTransitionTime":"2025-09-30T13:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.625103 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.625166 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.625185 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.625209 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.625255 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:43Z","lastTransitionTime":"2025-09-30T13:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.728670 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.728723 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.728734 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.728750 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.728762 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:43Z","lastTransitionTime":"2025-09-30T13:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.799716 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs\") pod \"network-metrics-daemon-k69sq\" (UID: \"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\") " pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:43 crc kubenswrapper[4783]: E0930 13:35:43.799912 4783 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 13:35:43 crc kubenswrapper[4783]: E0930 13:35:43.800023 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs podName:700fd549-bf4a-4e30-9e2c-efdb039a7ac4 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:47.799989971 +0000 UTC m=+47.731456328 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs") pod "network-metrics-daemon-k69sq" (UID: "700fd549-bf4a-4e30-9e2c-efdb039a7ac4") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.831492 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.831540 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.831552 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.831568 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.831580 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:43Z","lastTransitionTime":"2025-09-30T13:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.843088 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.843127 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:43 crc kubenswrapper[4783]: E0930 13:35:43.843326 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.843418 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.843450 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:43 crc kubenswrapper[4783]: E0930 13:35:43.843551 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:35:43 crc kubenswrapper[4783]: E0930 13:35:43.843690 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:35:43 crc kubenswrapper[4783]: E0930 13:35:43.843855 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.934674 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.934746 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.934765 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.934792 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:43 crc kubenswrapper[4783]: I0930 13:35:43.934811 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:43Z","lastTransitionTime":"2025-09-30T13:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.037649 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.037702 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.037720 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.037744 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.037764 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:44Z","lastTransitionTime":"2025-09-30T13:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.140911 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.141001 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.141036 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.141066 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.141089 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:44Z","lastTransitionTime":"2025-09-30T13:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.244541 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.244581 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.244592 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.244616 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.244633 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:44Z","lastTransitionTime":"2025-09-30T13:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.346615 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.346671 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.346686 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.346704 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.346719 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:44Z","lastTransitionTime":"2025-09-30T13:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.450058 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.450110 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.450122 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.450138 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.450151 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:44Z","lastTransitionTime":"2025-09-30T13:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.553091 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.553120 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.553131 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.553150 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.553167 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:44Z","lastTransitionTime":"2025-09-30T13:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.656279 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.656333 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.656354 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.656378 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.656396 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:44Z","lastTransitionTime":"2025-09-30T13:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.759136 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.759194 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.759211 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.759261 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.759279 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:44Z","lastTransitionTime":"2025-09-30T13:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.862060 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.862130 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.862147 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.862168 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.862186 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:44Z","lastTransitionTime":"2025-09-30T13:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.965267 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.965351 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.965370 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.965393 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:44 crc kubenswrapper[4783]: I0930 13:35:44.965410 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:44Z","lastTransitionTime":"2025-09-30T13:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.067873 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.067944 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.067969 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.067999 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.068023 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:45Z","lastTransitionTime":"2025-09-30T13:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.170876 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.170912 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.170923 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.170937 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.170946 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:45Z","lastTransitionTime":"2025-09-30T13:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.274301 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.274339 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.274351 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.274366 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.274380 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:45Z","lastTransitionTime":"2025-09-30T13:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.377137 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.377175 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.377186 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.377204 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.377214 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:45Z","lastTransitionTime":"2025-09-30T13:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.480124 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.480168 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.480178 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.480193 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.480203 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:45Z","lastTransitionTime":"2025-09-30T13:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.582451 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.582508 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.582520 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.582537 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.582550 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:45Z","lastTransitionTime":"2025-09-30T13:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.685117 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.685141 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.685151 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.685164 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.685172 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:45Z","lastTransitionTime":"2025-09-30T13:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.787023 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.787057 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.787067 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.787081 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.787093 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:45Z","lastTransitionTime":"2025-09-30T13:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.842968 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.842979 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.842996 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.843114 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:45 crc kubenswrapper[4783]: E0930 13:35:45.843212 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:35:45 crc kubenswrapper[4783]: E0930 13:35:45.843298 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:35:45 crc kubenswrapper[4783]: E0930 13:35:45.843374 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:35:45 crc kubenswrapper[4783]: E0930 13:35:45.843529 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.889700 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.889738 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.889747 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.889761 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.889771 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:45Z","lastTransitionTime":"2025-09-30T13:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.992879 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.992931 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.992948 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.992971 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:45 crc kubenswrapper[4783]: I0930 13:35:45.992989 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:45Z","lastTransitionTime":"2025-09-30T13:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... the same five-record status block (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeats roughly every 100 ms from 13:35:46.095 through 13:35:47.745 ...]
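The condition={...} payload in these records is the kubelet's Ready condition rendered as JSON. A minimal, hypothetical Go sketch (standard library only; the struct below is a simplified stand-in for illustration, not the k8s.io/api NodeCondition type) that reproduces the shape of the logged condition:

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// nodeCondition mirrors the fields visible in the condition={...} payload
// logged by setters.go above; it is an illustrative stand-in, not the
// k8s.io/api NodeCondition type.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// RFC 3339 UTC timestamps, as in the logged lastHeartbeatTime fields.
	now := time.Now().UTC().Format(time.RFC3339)
	cond := nodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. " +
			"Has your network provider started?",
	}
	b, err := json.Marshal(cond)
	if err != nil {
		panic(err)
	}
	fmt.Printf("Node became not ready: node=%q condition=%s\n", "crc", string(b))
}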
Sep 30 13:35:47 crc kubenswrapper[4783]: I0930 13:35:47.842856 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:35:47 crc kubenswrapper[4783]: I0930 13:35:47.842900 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:35:47 crc kubenswrapper[4783]: E0930 13:35:47.843008 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4"
Sep 30 13:35:47 crc kubenswrapper[4783]: I0930 13:35:47.843041 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:35:47 crc kubenswrapper[4783]: I0930 13:35:47.843144 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:35:47 crc kubenswrapper[4783]: E0930 13:35:47.843430 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 13:35:47 crc kubenswrapper[4783]: E0930 13:35:47.843642 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:35:47 crc kubenswrapper[4783]: E0930 13:35:47.843803 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 13:35:47 crc kubenswrapper[4783]: I0930 13:35:47.844073 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs\") pod \"network-metrics-daemon-k69sq\" (UID: \"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\") " pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:35:47 crc kubenswrapper[4783]: E0930 13:35:47.844238 4783 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 13:35:47 crc kubenswrapper[4783]: E0930 13:35:47.844300 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs podName:700fd549-bf4a-4e30-9e2c-efdb039a7ac4 nodeName:}" failed. No retries permitted until 2025-09-30 13:35:55.844282218 +0000 UTC m=+55.775748535 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs") pod "network-metrics-daemon-k69sq" (UID: "700fd549-bf4a-4e30-9e2c-efdb039a7ac4") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 13:35:47 crc kubenswrapper[4783]: I0930 13:35:47.848087 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:47 crc kubenswrapper[4783]: I0930 13:35:47.848142 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:47 crc kubenswrapper[4783]: I0930 13:35:47.848165 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:47 crc kubenswrapper[4783]: I0930 13:35:47.848194 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:47 crc kubenswrapper[4783]: I0930 13:35:47.848217 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:47Z","lastTransitionTime":"2025-09-30T13:35:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... the same five-record status block repeats at 13:35:47.951, 13:35:48.053, 13:35:48.156, and 13:35:48.256 ...]
Sep 30 13:35:48 crc kubenswrapper[4783]: E0930 13:35:48.277363 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:48Z is after 2025-08-24T17:21:41Z"
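The patch is rejected because the node-identity webhook's serving certificate expired on 2025-08-24, well before the current time 2025-09-30T13:35:48Z. A minimal Go sketch of the x509 validity-window check that produces this class of error (reading a PEM file from the command line is illustrative; this is not the kubelet or webhook code):

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

// checkValidity applies the NotBefore/NotAfter window test that makes the
// TLS handshake above fail with "certificate has expired or is not yet
// valid".
func checkValidity(pemBytes []byte, now time.Time) error {
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		return fmt.Errorf("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		return err
	}
	if now.After(cert.NotAfter) {
		return fmt.Errorf("certificate has expired: current time %s is after %s",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	}
	if now.Before(cert.NotBefore) {
		return fmt.Errorf("certificate is not yet valid: current time %s is before %s",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	}
	return nil
}

func main() {
	if len(os.Args) < 2 {
		fmt.Fprintln(os.Stderr, "usage: certcheck <cert.pem>")
		os.Exit(2)
	}
	pemBytes, err := os.ReadFile(os.Args[1])
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if err := checkValidity(pemBytes, time.Now()); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("certificate is within its validity window")
}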
event="NodeHasNoDiskPressure" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.281890 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.281914 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.281932 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:48Z","lastTransitionTime":"2025-09-30T13:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:48 crc kubenswrapper[4783]: E0930 13:35:48.303123 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:48Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.306950 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.306992 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.307003 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.307020 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.307031 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:48Z","lastTransitionTime":"2025-09-30T13:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:48 crc kubenswrapper[4783]: E0930 13:35:48.321238 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:48Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.324899 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.324934 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.324944 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.324961 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.324971 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:48Z","lastTransitionTime":"2025-09-30T13:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:48 crc kubenswrapper[4783]: E0930 13:35:48.341119 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:48Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.343981 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.344041 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.344055 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.344076 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.344093 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:48Z","lastTransitionTime":"2025-09-30T13:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:48 crc kubenswrapper[4783]: E0930 13:35:48.353997 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:48Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:48 crc kubenswrapper[4783]: E0930 13:35:48.354145 4783 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.355384 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.355414 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.355422 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.355435 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.355444 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:48Z","lastTransitionTime":"2025-09-30T13:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.458083 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.458142 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.458150 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.458164 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.458175 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:48Z","lastTransitionTime":"2025-09-30T13:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.561514 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.561557 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.561573 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.561594 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.561616 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:48Z","lastTransitionTime":"2025-09-30T13:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.664762 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.664812 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.664826 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.664847 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.664862 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:48Z","lastTransitionTime":"2025-09-30T13:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.768194 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.768273 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.768290 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.768312 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.768329 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:48Z","lastTransitionTime":"2025-09-30T13:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.870889 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.871024 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.871046 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.871065 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.871076 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:48Z","lastTransitionTime":"2025-09-30T13:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.973761 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.973803 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.973813 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.973829 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:48 crc kubenswrapper[4783]: I0930 13:35:48.973839 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:48Z","lastTransitionTime":"2025-09-30T13:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.075811 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.075857 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.075868 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.075886 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.075899 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:49Z","lastTransitionTime":"2025-09-30T13:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.178288 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.178331 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.178346 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.178363 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.178375 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:49Z","lastTransitionTime":"2025-09-30T13:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.281798 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.281858 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.281880 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.281953 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.281975 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:49Z","lastTransitionTime":"2025-09-30T13:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.384828 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.384893 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.384910 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.384933 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.384953 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:49Z","lastTransitionTime":"2025-09-30T13:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.488266 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.488349 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.488371 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.488393 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.488408 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:49Z","lastTransitionTime":"2025-09-30T13:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.591145 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.591194 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.591206 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.591244 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.591260 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:49Z","lastTransitionTime":"2025-09-30T13:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.694276 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.694331 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.694342 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.694364 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.694376 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:49Z","lastTransitionTime":"2025-09-30T13:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.797495 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.797542 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.797558 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.797578 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.797590 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:49Z","lastTransitionTime":"2025-09-30T13:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.842518 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.842606 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.842562 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.842562 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:49 crc kubenswrapper[4783]: E0930 13:35:49.842701 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:35:49 crc kubenswrapper[4783]: E0930 13:35:49.842843 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:35:49 crc kubenswrapper[4783]: E0930 13:35:49.842913 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:35:49 crc kubenswrapper[4783]: E0930 13:35:49.842985 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.901913 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.902030 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.902057 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.902090 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:49 crc kubenswrapper[4783]: I0930 13:35:49.902137 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:49Z","lastTransitionTime":"2025-09-30T13:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.005502 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.005546 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.005560 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.005579 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.005598 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:50Z","lastTransitionTime":"2025-09-30T13:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.108237 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.108278 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.108292 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.108315 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.108329 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:50Z","lastTransitionTime":"2025-09-30T13:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.210921 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.210981 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.210998 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.211021 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.211039 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:50Z","lastTransitionTime":"2025-09-30T13:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.313437 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.313498 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.313521 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.313550 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.313567 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:50Z","lastTransitionTime":"2025-09-30T13:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.415471 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.415519 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.415542 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.415565 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.415581 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:50Z","lastTransitionTime":"2025-09-30T13:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.518552 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.518615 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.518633 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.518656 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.518676 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:50Z","lastTransitionTime":"2025-09-30T13:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.620787 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.620838 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.620853 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.620873 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.620887 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:50Z","lastTransitionTime":"2025-09-30T13:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.723986 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.724040 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.724053 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.724069 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.724081 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:50Z","lastTransitionTime":"2025-09-30T13:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.826337 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.826376 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.826388 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.826404 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.826415 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:50Z","lastTransitionTime":"2025-09-30T13:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.858156 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:50Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.874703 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:50Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.889354 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:50Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.905409 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:50Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.924447 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:50Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.928310 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.928344 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:50 crc 
kubenswrapper[4783]: I0930 13:35:50.928355 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.928372 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.928385 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:50Z","lastTransitionTime":"2025-09-30T13:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.945729 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:50Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.962901 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:50Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.976268 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:50Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:50 crc kubenswrapper[4783]: I0930 13:35:50.988999 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:50Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.010315 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11
d02bfd1e52c18a15a988f1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ad14312ab0209c838067eeadd64f9cd91c637094f773fa51218180845f09fc7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:36Z\\\",\\\"message\\\":\\\"ent handler 8 for removal\\\\nI0930 13:35:36.436401 6088 factory.go:656] Stopping watch factory\\\\nI0930 13:35:36.436405 6088 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:36.436415 6088 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:36.436439 6088 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:36.436319 6088 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436528 6088 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436453 6088 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 13:35:36.436764 6088 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 13:35:36.437101 6088 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"message\\\":\\\"-24T17:21:41Z]\\\\nI0930 13:35:39.118387 6230 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0930 13:35:39.118379 6230 services_controller.go:434] Service openshift-cluster-version/cluster-version-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{cluster-version-operator openshift-cluster-version ddf4933a-f532-4906-9b8f-3b15aa433264 6187 0 2025-02-23 05:11:57 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:cluster-version-operator] map[exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true kubernetes.io/description:Expose cluster-version operator metrics to other in-cluster consumers. Access requires a prometheus-k8s RoleBinding in this namespace. 
service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:cluster-version-operator-serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e327 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Nam\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:51Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.022721 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:51Z is after 2025-08-24T17:21:41Z" Sep 30 
13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.030914 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.030971 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.030981 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.030997 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.031007 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:51Z","lastTransitionTime":"2025-09-30T13:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.037916 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:51Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 
13:35:51.052411 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:51Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.064603 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:51Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.075915 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:51Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.090361 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:51Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.133192 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.133257 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.133267 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.133281 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.133296 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:51Z","lastTransitionTime":"2025-09-30T13:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.236578 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.236623 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.236634 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.236649 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.236663 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:51Z","lastTransitionTime":"2025-09-30T13:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.339596 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.339633 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.339642 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.339657 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.339666 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:51Z","lastTransitionTime":"2025-09-30T13:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.443997 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.444049 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.444059 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.444074 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.444084 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:51Z","lastTransitionTime":"2025-09-30T13:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.546730 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.546759 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.546766 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.546799 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.546808 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:51Z","lastTransitionTime":"2025-09-30T13:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.651016 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.651086 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.651110 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.651139 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.651163 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:51Z","lastTransitionTime":"2025-09-30T13:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.753510 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.753550 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.753561 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.753578 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.753588 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:51Z","lastTransitionTime":"2025-09-30T13:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.842373 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.842444 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.842472 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:51 crc kubenswrapper[4783]: E0930 13:35:51.842523 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.842586 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:51 crc kubenswrapper[4783]: E0930 13:35:51.842748 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:35:51 crc kubenswrapper[4783]: E0930 13:35:51.842933 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:35:51 crc kubenswrapper[4783]: E0930 13:35:51.843023 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.856331 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.856382 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.856394 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.856412 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.856425 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:51Z","lastTransitionTime":"2025-09-30T13:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.958780 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.958866 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.958893 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.958924 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:51 crc kubenswrapper[4783]: I0930 13:35:51.958948 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:51Z","lastTransitionTime":"2025-09-30T13:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.061321 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.061359 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.061368 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.061381 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.061390 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:52Z","lastTransitionTime":"2025-09-30T13:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.164778 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.164819 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.164828 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.164842 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.164850 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:52Z","lastTransitionTime":"2025-09-30T13:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.267446 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.267488 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.267500 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.267516 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.267528 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:52Z","lastTransitionTime":"2025-09-30T13:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.370788 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.370854 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.370871 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.370898 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.370919 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:52Z","lastTransitionTime":"2025-09-30T13:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.473560 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.473626 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.473637 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.473651 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.473661 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:52Z","lastTransitionTime":"2025-09-30T13:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.576340 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.576376 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.576387 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.576404 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.576416 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:52Z","lastTransitionTime":"2025-09-30T13:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.678885 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.678961 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.678973 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.678991 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.679003 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:52Z","lastTransitionTime":"2025-09-30T13:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.782093 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.782166 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.782190 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.782212 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.782256 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:52Z","lastTransitionTime":"2025-09-30T13:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.885448 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.885522 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.885545 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.885574 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.885597 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:52Z","lastTransitionTime":"2025-09-30T13:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.988779 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.988826 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.988837 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.988854 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:52 crc kubenswrapper[4783]: I0930 13:35:52.988865 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:52Z","lastTransitionTime":"2025-09-30T13:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.091825 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.091882 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.091900 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.091925 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.091945 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:53Z","lastTransitionTime":"2025-09-30T13:35:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.195622 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.195650 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.195659 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.195673 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.195681 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:53Z","lastTransitionTime":"2025-09-30T13:35:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.298286 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.298327 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.298336 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.298350 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.298360 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:53Z","lastTransitionTime":"2025-09-30T13:35:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.400840 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.400885 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.400897 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.400917 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.400928 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:53Z","lastTransitionTime":"2025-09-30T13:35:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.503537 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.503598 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.503842 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.503899 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.503917 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:53Z","lastTransitionTime":"2025-09-30T13:35:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.607617 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.607698 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.607727 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.607756 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.607776 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:53Z","lastTransitionTime":"2025-09-30T13:35:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.657510 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.657655 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.657707 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-09-30 13:36:25.657649782 +0000 UTC m=+85.589116129 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.657788 4783 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.657830 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.657845 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:36:25.657828448 +0000 UTC m=+85.589294795 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.657952 4783 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.658008 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:36:25.657995213 +0000 UTC m=+85.589461520 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.711965 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.712025 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.712047 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.712075 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.712096 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:53Z","lastTransitionTime":"2025-09-30T13:35:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.758975 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.759075 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.759336 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.759345 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.759371 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.759388 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.759393 4783 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.759409 4783 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.759487 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 13:36:25.75945718 +0000 UTC m=+85.690923527 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.759524 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 13:36:25.759505932 +0000 UTC m=+85.690972279 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.815079 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.815115 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.815147 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.815164 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.815174 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:53Z","lastTransitionTime":"2025-09-30T13:35:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.842943 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.843012 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.843080 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.843024 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.843204 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.843177 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.843351 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:35:53 crc kubenswrapper[4783]: E0930 13:35:53.843400 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.918148 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.918189 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.918203 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.918243 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:53 crc kubenswrapper[4783]: I0930 13:35:53.918284 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:53Z","lastTransitionTime":"2025-09-30T13:35:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.022406 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.022474 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.022491 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.022513 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.022530 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:54Z","lastTransitionTime":"2025-09-30T13:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.125924 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.125988 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.126005 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.126031 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.126052 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:54Z","lastTransitionTime":"2025-09-30T13:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.228675 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.228743 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.228761 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.228786 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.228805 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:54Z","lastTransitionTime":"2025-09-30T13:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.331798 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.331916 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.331938 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.331964 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.331983 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:54Z","lastTransitionTime":"2025-09-30T13:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.434711 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.434750 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.434761 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.434778 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.434789 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:54Z","lastTransitionTime":"2025-09-30T13:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.537603 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.537674 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.537691 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.537716 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.537734 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:54Z","lastTransitionTime":"2025-09-30T13:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.640082 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.640111 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.640121 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.640135 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.640147 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:54Z","lastTransitionTime":"2025-09-30T13:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.742320 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.742614 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.742851 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.743048 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.743285 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:54Z","lastTransitionTime":"2025-09-30T13:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.833194 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.834655 4783 scope.go:117] "RemoveContainer" containerID="36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.846558 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.846751 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.846833 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.846892 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.846915 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:54Z","lastTransitionTime":"2025-09-30T13:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.860073 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:54Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.877814 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:54Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.899457 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:54Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.917821 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:54Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.933532 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:54Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.944556 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:54Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.949480 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.949730 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.949753 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.949781 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.949814 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:54Z","lastTransitionTime":"2025-09-30T13:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.965534 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:54Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:54 crc kubenswrapper[4783]: I0930 13:35:54.996372 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/r
un/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"message\\\":\\\"-24T17:21:41Z]\\\\nI0930 13:35:39.118387 6230 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0930 13:35:39.118379 6230 services_controller.go:434] Service openshift-cluster-version/cluster-version-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{cluster-version-operator openshift-cluster-version ddf4933a-f532-4906-9b8f-3b15aa433264 6187 0 2025-02-23 05:11:57 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:cluster-version-operator] map[exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true kubernetes.io/description:Expose cluster-version operator metrics to other in-cluster consumers. Access requires a prometheus-k8s RoleBinding in this namespace. 
service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:cluster-version-operator-serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e327 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Nam\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"o
vnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:54Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.011644 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 
13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.034581 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.053785 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.053851 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.053876 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.053908 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.053936 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:55Z","lastTransitionTime":"2025-09-30T13:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.057326 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.077490 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.093547 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.116442 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.128110 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.139607 4783 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.156272 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.156321 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.156337 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.156356 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.156370 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:55Z","lastTransitionTime":"2025-09-30T13:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.205630 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/1.log" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.209146 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerStarted","Data":"8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f"} Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.209646 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.232925 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.249173 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.258671 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.258926 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.259080 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.259248 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.259371 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:55Z","lastTransitionTime":"2025-09-30T13:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.264203 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.289548 4783 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.308121 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.332678 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.343473 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.356469 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.361406 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.361445 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.361453 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.361467 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.361477 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:55Z","lastTransitionTime":"2025-09-30T13:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.372681 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec5
3ba6b422fe7d350edbae7a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"message\\\":\\\"-24T17:21:41Z]\\\\nI0930 13:35:39.118387 6230 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0930 13:35:39.118379 6230 services_controller.go:434] Service openshift-cluster-version/cluster-version-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{cluster-version-operator openshift-cluster-version ddf4933a-f532-4906-9b8f-3b15aa433264 6187 0 2025-02-23 05:11:57 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:cluster-version-operator] map[exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true kubernetes.io/description:Expose cluster-version operator metrics to other in-cluster consumers. Access requires a prometheus-k8s RoleBinding in this namespace. service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:cluster-version-operator-serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e327 \\\\u003cnil\\\\u003e}] [] 
[]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Nam\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.382185 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 
13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.391896 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.401185 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.412700 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.422533 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.436685 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.446623 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.463349 4783 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.463570 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.463654 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.463754 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.463848 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:55Z","lastTransitionTime":"2025-09-30T13:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.567083 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.567148 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.567166 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.567191 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.567209 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:55Z","lastTransitionTime":"2025-09-30T13:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.670826 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.671348 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.671508 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.671653 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.671816 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:55Z","lastTransitionTime":"2025-09-30T13:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.774402 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.774449 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.774464 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.774480 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.774493 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:55Z","lastTransitionTime":"2025-09-30T13:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.842774 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:55 crc kubenswrapper[4783]: E0930 13:35:55.843166 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.843465 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.843626 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.843554 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:55 crc kubenswrapper[4783]: E0930 13:35:55.843884 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:35:55 crc kubenswrapper[4783]: E0930 13:35:55.843970 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:35:55 crc kubenswrapper[4783]: E0930 13:35:55.844014 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.875988 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs\") pod \"network-metrics-daemon-k69sq\" (UID: \"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\") " pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.876811 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.876834 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.876843 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.876856 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.876865 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:55Z","lastTransitionTime":"2025-09-30T13:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:55 crc kubenswrapper[4783]: E0930 13:35:55.877198 4783 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 13:35:55 crc kubenswrapper[4783]: E0930 13:35:55.877378 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs podName:700fd549-bf4a-4e30-9e2c-efdb039a7ac4 nodeName:}" failed. No retries permitted until 2025-09-30 13:36:11.877354572 +0000 UTC m=+71.808820899 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs") pod "network-metrics-daemon-k69sq" (UID: "700fd549-bf4a-4e30-9e2c-efdb039a7ac4") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.936297 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.949493 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.951807 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"vo
lumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] 
MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.961884 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.971426 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.978582 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.978610 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.978620 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.978634 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.978644 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:55Z","lastTransitionTime":"2025-09-30T13:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:55 crc kubenswrapper[4783]: I0930 13:35:55.983777 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.000892 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"message\\\":\\\"-24T17:21:41Z]\\\\nI0930 13:35:39.118387 6230 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0930 13:35:39.118379 6230 services_controller.go:434] Service openshift-cluster-version/cluster-version-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{cluster-version-operator openshift-cluster-version ddf4933a-f532-4906-9b8f-3b15aa433264 6187 0 2025-02-23 05:11:57 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:cluster-version-operator] map[exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true kubernetes.io/description:Expose cluster-version operator metrics to other in-cluster consumers. Access requires a prometheus-k8s RoleBinding in this namespace. 
service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:cluster-version-operator-serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e327 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Nam\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"}
,{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:55Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.011936 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 
13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.022133 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.032558 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.042926 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.054818 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.067491 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.080442 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.080511 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.080527 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.080543 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.080553 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:56Z","lastTransitionTime":"2025-09-30T13:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.080999 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.091190 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.100651 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.120967 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 
2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.133458 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.183562 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.183616 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.183632 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.183655 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.183669 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:56Z","lastTransitionTime":"2025-09-30T13:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.215277 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/2.log" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.216395 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/1.log" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.220840 4783 generic.go:334] "Generic (PLEG): container finished" podID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerID="8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f" exitCode=1 Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.220892 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerDied","Data":"8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f"} Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.220974 4783 scope.go:117] "RemoveContainer" containerID="36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.222114 4783 scope.go:117] "RemoveContainer" containerID="8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f" Sep 30 13:35:56 crc kubenswrapper[4783]: E0930 13:35:56.222602 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.242605 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.256456 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.278029 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.286555 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.286601 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:56 crc 
kubenswrapper[4783]: I0930 13:35:56.286616 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.286635 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.286654 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:56Z","lastTransitionTime":"2025-09-30T13:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.292197 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.306858 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.317432 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.328103 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.345118 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6677cf0-3613-4d5b-aad8-facb0b696402\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25232655bbb35fd9585d250a134dec02d17754eebc513502e0344820bf93210b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e60989335a4f5df01665b18ae6ede533d576de70e6ddcfa28e59f83400055051\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://026217b82bd8ae03e644f40984148495e3b4ce9d342008e6fe3070d2d0d7db10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.357463 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 
2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.367774 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.377815 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.389391 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.390920 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.390958 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.390983 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.391005 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.391020 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:56Z","lastTransitionTime":"2025-09-30T13:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.404012 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.426465 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/r
un/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36b1e764ae0ef37fe5dbf9529b1728166fd8cb11d02bfd1e52c18a15a988f1b2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"message\\\":\\\"-24T17:21:41Z]\\\\nI0930 13:35:39.118387 6230 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0930 13:35:39.118379 6230 services_controller.go:434] Service openshift-cluster-version/cluster-version-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{cluster-version-operator openshift-cluster-version ddf4933a-f532-4906-9b8f-3b15aa433264 6187 0 2025-02-23 05:11:57 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:cluster-version-operator] map[exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true kubernetes.io/description:Expose cluster-version operator metrics to other in-cluster consumers. Access requires a prometheus-k8s RoleBinding in this namespace. 
service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:cluster-version-operator-serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e327 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Nam\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"message\\\":\\\".EgressIP event handler 8 for removal\\\\nI0930 13:35:55.756192 6453 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 13:35:55.756206 6453 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 13:35:55.756254 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:55.756268 6453 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:55.756289 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 13:35:55.756297 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 13:35:55.756347 6453 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:55.756354 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 13:35:55.756361 6453 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 13:35:55.756371 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 13:35:55.756407 6453 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 13:35:55.756432 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 13:35:55.756415 6453 factory.go:656] Stopping watch factory\\\\nI0930 13:35:55.756480 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0930 13:35:55.756510 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 13:35:55.756632 6453 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.438700 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.451460 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.463788 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:56Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.492964 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.493011 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.493022 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.493037 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.493048 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:56Z","lastTransitionTime":"2025-09-30T13:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.595502 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.595546 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.595559 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.595575 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.595586 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:56Z","lastTransitionTime":"2025-09-30T13:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.698025 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.698096 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.698115 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.698142 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.698161 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:56Z","lastTransitionTime":"2025-09-30T13:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.800888 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.800972 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.800996 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.801029 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.801046 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:56Z","lastTransitionTime":"2025-09-30T13:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.904080 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.904134 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.904151 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.904175 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:56 crc kubenswrapper[4783]: I0930 13:35:56.904193 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:56Z","lastTransitionTime":"2025-09-30T13:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.007523 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.007551 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.007561 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.007573 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.007582 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:57Z","lastTransitionTime":"2025-09-30T13:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.110819 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.110905 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.110955 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.110995 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.111018 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:57Z","lastTransitionTime":"2025-09-30T13:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.213197 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.213298 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.213316 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.213345 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.213396 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:57Z","lastTransitionTime":"2025-09-30T13:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.226126 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/2.log" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.230263 4783 scope.go:117] "RemoveContainer" containerID="8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f" Sep 30 13:35:57 crc kubenswrapper[4783]: E0930 13:35:57.230586 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.246844 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\
"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.270867 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec5
3ba6b422fe7d350edbae7a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"message\\\":\\\".EgressIP event handler 8 for removal\\\\nI0930 13:35:55.756192 6453 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 13:35:55.756206 6453 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 13:35:55.756254 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:55.756268 6453 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:55.756289 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 13:35:55.756297 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 13:35:55.756347 6453 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:55.756354 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 13:35:55.756361 6453 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 13:35:55.756371 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 13:35:55.756407 6453 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 13:35:55.756432 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 13:35:55.756415 6453 factory.go:656] Stopping watch factory\\\\nI0930 13:35:55.756480 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0930 13:35:55.756510 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 13:35:55.756632 6453 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.288870 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.310071 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.315860 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.315903 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.315915 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.315938 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.315951 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:57Z","lastTransitionTime":"2025-09-30T13:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.329889 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.344260 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.358906 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.375923 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.393256 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.417334 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.418828 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.418894 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.418910 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.418931 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.418946 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:57Z","lastTransitionTime":"2025-09-30T13:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.435827 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.456959 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.471639 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.490720 4783 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6677cf0-3613-4d5b-aad8-facb0b696402\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25232655bbb35fd9585d250a134dec02d17754eebc513502e0344820bf93210b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e60989335a4f5df01665b18ae6ede533d576de70e6ddcfa28e59f83400055051\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://026217b82bd8ae03e644f40984148495e3b4ce9d342008e6fe3070d2d0d7db10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.511580 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.521464 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.521564 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.521584 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.521608 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.521627 4783 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:57Z","lastTransitionTime":"2025-09-30T13:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.524429 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.537784 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.624875 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.624937 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.624955 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.624978 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.624995 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:57Z","lastTransitionTime":"2025-09-30T13:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.728043 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.728118 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.728140 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.728169 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.728191 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:57Z","lastTransitionTime":"2025-09-30T13:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.830902 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.831005 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.831021 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.831044 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.831064 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:57Z","lastTransitionTime":"2025-09-30T13:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.842370 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.842443 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.842445 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:57 crc kubenswrapper[4783]: E0930 13:35:57.842556 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.842633 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:57 crc kubenswrapper[4783]: E0930 13:35:57.842773 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:35:57 crc kubenswrapper[4783]: E0930 13:35:57.842968 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:35:57 crc kubenswrapper[4783]: E0930 13:35:57.843108 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.934494 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.934558 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.934576 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.934602 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.934618 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:57Z","lastTransitionTime":"2025-09-30T13:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.964941 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 13:35:57 crc kubenswrapper[4783]: I0930 13:35:57.983541 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:57Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.003247 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.022344 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.037645 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.037720 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.037742 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.037775 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.037800 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:58Z","lastTransitionTime":"2025-09-30T13:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.047323 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a0170
86d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.066185 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.087094 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.101654 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.117614 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.136733 4783 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6677cf0-3613-4d5b-aad8-facb0b696402\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25232655bbb35fd9585d250a134dec02d17754eebc513502e0344820bf93210b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e60989335a4f5df01665b18ae6ede533d576de70e6ddcfa28e59f83400055051\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://026217b82bd8ae03e644f40984148495e3b4ce9d342008e6fe3070d2d0d7db10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\
\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.141245 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.141295 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.141309 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.141326 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.141504 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:58Z","lastTransitionTime":"2025-09-30T13:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.160989 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.178702 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.198300 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.215322 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.237845 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.244525 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.244570 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.244579 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.244594 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.244605 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:58Z","lastTransitionTime":"2025-09-30T13:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.271468 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec5
3ba6b422fe7d350edbae7a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"message\\\":\\\".EgressIP event handler 8 for removal\\\\nI0930 13:35:55.756192 6453 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 13:35:55.756206 6453 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 13:35:55.756254 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:55.756268 6453 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:55.756289 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 13:35:55.756297 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 13:35:55.756347 6453 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:55.756354 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 13:35:55.756361 6453 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 13:35:55.756371 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 13:35:55.756407 6453 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 13:35:55.756432 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 13:35:55.756415 6453 factory.go:656] Stopping watch factory\\\\nI0930 13:35:55.756480 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0930 13:35:55.756510 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 13:35:55.756632 6453 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.288904 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.309163 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.347055 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.347085 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.347094 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.347105 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.347112 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:58Z","lastTransitionTime":"2025-09-30T13:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.449804 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.449855 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.449871 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.449893 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.449909 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:58Z","lastTransitionTime":"2025-09-30T13:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.513443 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.513505 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.513522 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.513544 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.513563 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:58Z","lastTransitionTime":"2025-09-30T13:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:58 crc kubenswrapper[4783]: E0930 13:35:58.535819 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.541687 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.541754 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.541775 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.541805 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.541828 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:58Z","lastTransitionTime":"2025-09-30T13:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:58 crc kubenswrapper[4783]: E0930 13:35:58.563012 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.569897 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.569998 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.570025 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.570055 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.570078 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:58Z","lastTransitionTime":"2025-09-30T13:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:58 crc kubenswrapper[4783]: E0930 13:35:58.589540 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.593189 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.593248 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.593260 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.593278 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.593290 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:58Z","lastTransitionTime":"2025-09-30T13:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:58 crc kubenswrapper[4783]: E0930 13:35:58.608060 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.611931 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.612170 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.612340 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.612469 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.612607 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:58Z","lastTransitionTime":"2025-09-30T13:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:58 crc kubenswrapper[4783]: E0930 13:35:58.627974 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:35:58Z is after 2025-08-24T17:21:41Z" Sep 30 13:35:58 crc kubenswrapper[4783]: E0930 13:35:58.628164 4783 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.629943 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.629976 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.629989 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.630005 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.630017 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:58Z","lastTransitionTime":"2025-09-30T13:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.733385 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.733536 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.733562 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.733584 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.733614 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:58Z","lastTransitionTime":"2025-09-30T13:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.835929 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.836177 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.836279 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.836395 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.836476 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:58Z","lastTransitionTime":"2025-09-30T13:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.939589 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.939651 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.939660 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.939687 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:58 crc kubenswrapper[4783]: I0930 13:35:58.939696 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:58Z","lastTransitionTime":"2025-09-30T13:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.041309 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.041367 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.041376 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.041389 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.041397 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:59Z","lastTransitionTime":"2025-09-30T13:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.144367 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.144442 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.144454 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.144475 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.144489 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:59Z","lastTransitionTime":"2025-09-30T13:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.247847 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.247887 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.247895 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.247908 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.247920 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:59Z","lastTransitionTime":"2025-09-30T13:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.351837 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.351894 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.351910 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.351932 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.351952 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:59Z","lastTransitionTime":"2025-09-30T13:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.455212 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.455320 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.455338 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.455363 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.455381 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:59Z","lastTransitionTime":"2025-09-30T13:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.560156 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.560185 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.560194 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.560210 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.560242 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:59Z","lastTransitionTime":"2025-09-30T13:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.663166 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.663213 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.663264 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.663303 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.663315 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:59Z","lastTransitionTime":"2025-09-30T13:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.766264 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.766327 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.766347 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.766369 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.766396 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:59Z","lastTransitionTime":"2025-09-30T13:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.842974 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.843104 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:35:59 crc kubenswrapper[4783]: E0930 13:35:59.843194 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.843278 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:35:59 crc kubenswrapper[4783]: E0930 13:35:59.843393 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:35:59 crc kubenswrapper[4783]: E0930 13:35:59.843458 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.843794 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:35:59 crc kubenswrapper[4783]: E0930 13:35:59.844089 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.869059 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.869092 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.869101 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.869113 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.869122 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:59Z","lastTransitionTime":"2025-09-30T13:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.971721 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.971780 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.971800 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.971823 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:35:59 crc kubenswrapper[4783]: I0930 13:35:59.971843 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:35:59Z","lastTransitionTime":"2025-09-30T13:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.075369 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.075425 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.075438 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.075457 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.075469 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:00Z","lastTransitionTime":"2025-09-30T13:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.178315 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.178368 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.178388 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.178413 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.178431 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:00Z","lastTransitionTime":"2025-09-30T13:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.281482 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.281542 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.281559 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.281583 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.281599 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:00Z","lastTransitionTime":"2025-09-30T13:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.384625 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.384683 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.384703 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.384735 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.384754 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:00Z","lastTransitionTime":"2025-09-30T13:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.487955 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.488081 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.488108 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.488136 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.488158 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:00Z","lastTransitionTime":"2025-09-30T13:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.591444 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.591532 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.591556 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.591592 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.591616 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:00Z","lastTransitionTime":"2025-09-30T13:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.694750 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.694806 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.694823 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.694847 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.694864 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:00Z","lastTransitionTime":"2025-09-30T13:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.797004 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.797065 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.797086 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.797109 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.797127 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:00Z","lastTransitionTime":"2025-09-30T13:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.866511 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:00Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.880633 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:00Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.897928 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:00Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.900451 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.900520 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.900539 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.900566 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.900585 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:00Z","lastTransitionTime":"2025-09-30T13:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.922386 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec5
3ba6b422fe7d350edbae7a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"message\\\":\\\".EgressIP event handler 8 for removal\\\\nI0930 13:35:55.756192 6453 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 13:35:55.756206 6453 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 13:35:55.756254 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:55.756268 6453 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:55.756289 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 13:35:55.756297 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 13:35:55.756347 6453 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:55.756354 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 13:35:55.756361 6453 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 13:35:55.756371 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 13:35:55.756407 6453 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 13:35:55.756432 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 13:35:55.756415 6453 factory.go:656] Stopping watch factory\\\\nI0930 13:35:55.756480 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0930 13:35:55.756510 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 13:35:55.756632 6453 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:00Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.941823 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:00Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.960244 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:00Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.970930 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:00Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.983433 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:00Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:00 crc kubenswrapper[4783]: I0930 13:36:00.997756 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:00Z is after 2025-08-24T17:21:41Z"
Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.003539 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.003591 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.003658 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.003688 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.003706 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:01Z","lastTransitionTime":"2025-09-30T13:36:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.014866 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:01Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.029908 4783 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:01Z is after 2025-08-24T17:21:41Z" Sep 30 
13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.042653 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:01Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.059269 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:01Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.074409 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:01Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.096607 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6677cf0-3613-4d5b-aad8-facb0b696402\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25232655bbb35fd9585d250a134dec02d17754eebc513502e0344820bf93210b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e60989335a4f5df01665b18ae6ede533d576de70e6ddcfa28e59f83400055051\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://026217b82bd8ae03e644f40984148495e3b4ce9d342008e6fe3070d2d0d7db10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:01Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.105702 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.105738 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.105749 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.105766 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.105779 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:01Z","lastTransitionTime":"2025-09-30T13:36:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.113623 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:01Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.125683 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:01Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.208187 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.208255 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.208271 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.208291 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.208305 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:01Z","lastTransitionTime":"2025-09-30T13:36:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the preceding "Recording event message for node" (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady) and "Node became not ready" sequence repeats 6 more times, 13:36:01.311 through 13:36:01.828, identical apart from timestamps]
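Every status patch above is rejected for the same reason: the kubelet cannot call the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 because that endpoint's serving certificate expired on 2025-08-24T17:21:41Z, over a month before the node clock reads 2025-09-30T13:36:01Z. A minimal Go sketch for confirming this from the node follows; the address and dates come from the log, while the program itself is an illustrative assumption, not part of the cluster tooling.

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Skip chain verification on purpose: verification is exactly what
	// fails in the log, and we still want to read the expired leaf cert.
	cfg := &tls.Config{InsecureSkipVerify: true}
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", cfg) // webhook endpoint from the log
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Println("subject:  ", cert.Subject)
	fmt.Println("notBefore:", cert.NotBefore.Format(time.RFC3339))
	fmt.Println("notAfter: ", cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		// The state the kubelet keeps reporting: current time is after notAfter.
		fmt.Println("serving certificate is expired")
	}
}

Until that certificate is rotated, every pod status patch from this kubelet will keep failing with the same x509 error.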
Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.842150 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.842329 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:36:01 crc kubenswrapper[4783]: E0930 13:36:01.842398 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.842171 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.842176 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:36:01 crc kubenswrapper[4783]: E0930 13:36:01.842529 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:36:01 crc kubenswrapper[4783]: E0930 13:36:01.842660 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:01 crc kubenswrapper[4783]: E0930 13:36:01.842798 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.931576 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.931611 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.931619 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.931631 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:01 crc kubenswrapper[4783]: I0930 13:36:01.931640 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:01Z","lastTransitionTime":"2025-09-30T13:36:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:02 crc kubenswrapper[4783]: I0930 13:36:02.033886 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:02 crc kubenswrapper[4783]: I0930 13:36:02.033952 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:02 crc kubenswrapper[4783]: I0930 13:36:02.033965 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:02 crc kubenswrapper[4783]: I0930 13:36:02.034007 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:02 crc kubenswrapper[4783]: I0930 13:36:02.034020 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:02Z","lastTransitionTime":"2025-09-30T13:36:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
[the same "Recording event message for node" / "Node became not ready" sequence repeats 17 more times, 13:36:02.136 through 13:36:03.792, identical apart from timestamps]
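The condition the kubelet keeps recording above never changes: the container runtime reports NetworkReady=false because nothing has yet written a CNI network config into /etc/kubernetes/cni/net.d/, so the node stays NotReady until the network provider pods (Multus/OVN-Kubernetes, judging by the pod names in this log) come up and install one. As a rough illustration of what that readiness gate amounts to, the sketch below scans the directory for a usable config; the path comes from the log, the accepted extensions follow common CNI conventions, and the helper itself is hypothetical rather than kubelet source.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfFiles lists candidate CNI network configs in dir, using the
// file extensions conventionally accepted by CNI tooling.
func cniConfFiles(dir string) ([]string, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	var confs []string
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			confs = append(confs, filepath.Join(dir, e.Name()))
		}
	}
	return confs, nil
}

func main() {
	confs, err := cniConfFiles("/etc/kubernetes/cni/net.d") // directory from the log
	if err != nil || len(confs) == 0 {
		// The state reported above: NetworkReady=false, NetworkPluginNotReady.
		fmt.Println("network not ready: no CNI configuration file found:", err)
		return
	}
	fmt.Println("CNI config present:", confs)
}

Once the network provider writes its config there, the runtime should flip NetworkReady to true and the NodeNotReady heartbeats above should stop.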
Sep 30 13:36:03 crc kubenswrapper[4783]: I0930 13:36:03.843012 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:36:03 crc kubenswrapper[4783]: I0930 13:36:03.843101 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:36:03 crc kubenswrapper[4783]: I0930 13:36:03.843101 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:36:03 crc kubenswrapper[4783]: I0930 13:36:03.843037 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:36:03 crc kubenswrapper[4783]: E0930 13:36:03.843254 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 13:36:03 crc kubenswrapper[4783]: E0930 13:36:03.843342 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:36:03 crc kubenswrapper[4783]: E0930 13:36:03.843451 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4"
Sep 30 13:36:03 crc kubenswrapper[4783]: E0930 13:36:03.843608 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[... the node-status cycle continues at ~100 ms intervals from 13:36:03.895 through 13:36:05.745, each pass recording NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID and NodeNotReady, then setting Ready=False with the same KubeletNotReady / no-CNI-configuration message ...]
Sep 30 13:36:05 crc kubenswrapper[4783]: I0930 13:36:05.842717 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:36:05 crc kubenswrapper[4783]: E0930 13:36:05.842873 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 13:36:05 crc kubenswrapper[4783]: I0930 13:36:05.843137 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:36:05 crc kubenswrapper[4783]: E0930 13:36:05.843314 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:36:05 crc kubenswrapper[4783]: I0930 13:36:05.843544 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:36:05 crc kubenswrapper[4783]: E0930 13:36:05.843658 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4"
Sep 30 13:36:05 crc kubenswrapper[4783]: I0930 13:36:05.844105 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:36:05 crc kubenswrapper[4783]: E0930 13:36:05.844258 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[... node-status cycle continues: 13:36:05.848, 13:36:05.950 ...]
[... the node-status cycle continues at ~100 ms intervals from 13:36:06.052 through 13:36:07.803, unchanged apart from the timestamps ...]
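Every one of these condition updates points at the same root cause: no file yet under /etc/kubernetes/cni/net.d/. What ends the loop is the network operator (OVN-Kubernetes on this cluster) dropping a config into that directory. A sketch of sanity-checking such a file before installing it; the config contents and filename below are illustrative, not the file OVN-Kubernetes actually writes:

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// Illustrative CNI config in the shape the kubelet looks for; the real
// file installed by the network operator carries more fields.
const conf = `{
  "cniVersion": "0.4.0",
  "name": "ovn-kubernetes",
  "type": "ovn-k8s-cni-overlay"
}`

func main() {
	// The kubelet needs a parseable config naming a plugin "type"; once
	// one exists in the CNI conf dir, NetworkPluginNotReady clears.
	var v struct {
		CNIVersion string `json:"cniVersion"`
		Name       string `json:"name"`
		Type       string `json:"type"`
	}
	if err := json.Unmarshal([]byte(conf), &v); err != nil {
		fmt.Fprintln(os.Stderr, "not valid JSON:", err)
		os.Exit(1)
	}
	if v.Type == "" {
		fmt.Fprintln(os.Stderr, "missing plugin type")
		os.Exit(1)
	}
	fmt.Printf("ok: would install as 10-%s.conf\n", v.Name)
}
```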
Sep 30 13:36:07 crc kubenswrapper[4783]: I0930 13:36:07.843852 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:36:07 crc kubenswrapper[4783]: I0930 13:36:07.843998 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:36:07 crc kubenswrapper[4783]: I0930 13:36:07.844022 4783 scope.go:117] "RemoveContainer" containerID="8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f"
Sep 30 13:36:07 crc kubenswrapper[4783]: I0930 13:36:07.844119 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:36:07 crc kubenswrapper[4783]: E0930 13:36:07.844289 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4"
Sep 30 13:36:07 crc kubenswrapper[4783]: E0930 13:36:07.844431 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 13:36:07 crc kubenswrapper[4783]: E0930 13:36:07.844485 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"
Sep 30 13:36:07 crc kubenswrapper[4783]: E0930 13:36:07.844527 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 13:36:07 crc kubenswrapper[4783]: I0930 13:36:07.844565 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:36:07 crc kubenswrapper[4783]: E0930 13:36:07.844798 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[... node-status cycle continues: 13:36:07.906, 13:36:08.009 ...]
[... node-status cycle continues at ~100 ms intervals: 13:36:08.112 through 13:36:08.833 ...]
Sep 30 13:36:08 crc kubenswrapper[4783]: I0930 13:36:08.935988 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:08 crc kubenswrapper[4783]: I0930 13:36:08.936052 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:08 crc kubenswrapper[4783]: I0930 13:36:08.936093 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:08 crc kubenswrapper[4783]: I0930 13:36:08.936280 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:08 crc kubenswrapper[4783]: I0930 13:36:08.936312 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:08Z","lastTransitionTime":"2025-09-30T13:36:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:08 crc kubenswrapper[4783]: I0930 13:36:08.996005 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:08 crc kubenswrapper[4783]: I0930 13:36:08.996340 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:08 crc kubenswrapper[4783]: I0930 13:36:08.996463 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:08 crc kubenswrapper[4783]: I0930 13:36:08.996601 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:08 crc kubenswrapper[4783]: I0930 13:36:08.996716 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:08Z","lastTransitionTime":"2025-09-30T13:36:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:09 crc kubenswrapper[4783]: E0930 13:36:09.012309 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:09Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.016876 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.016937 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.016959 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.016986 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.017009 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:09Z","lastTransitionTime":"2025-09-30T13:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:09 crc kubenswrapper[4783]: E0930 13:36:09.035894 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:09Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.042055 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.042101 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.042116 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.042136 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.042149 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:09Z","lastTransitionTime":"2025-09-30T13:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:09 crc kubenswrapper[4783]: E0930 13:36:09.054832 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:09Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.059268 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.059301 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.059317 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.059334 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.059348 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:09Z","lastTransitionTime":"2025-09-30T13:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:09 crc kubenswrapper[4783]: E0930 13:36:09.092478 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:09Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.105710 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.105764 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.105778 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.105796 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.105814 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:09Z","lastTransitionTime":"2025-09-30T13:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:09 crc kubenswrapper[4783]: E0930 13:36:09.124240 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:09Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:09 crc kubenswrapper[4783]: E0930 13:36:09.124361 4783 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.125767 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.125811 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.125820 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.125834 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.125844 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:09Z","lastTransitionTime":"2025-09-30T13:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.228340 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.228416 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.228433 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.228456 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.228474 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:09Z","lastTransitionTime":"2025-09-30T13:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.331489 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.331610 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.331625 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.331641 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.331653 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:09Z","lastTransitionTime":"2025-09-30T13:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.433646 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.433700 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.433711 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.433727 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.433738 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:09Z","lastTransitionTime":"2025-09-30T13:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.535857 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.535900 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.535917 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.535931 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.535942 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:09Z","lastTransitionTime":"2025-09-30T13:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.638131 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.638158 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.638165 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.638178 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.638188 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:09Z","lastTransitionTime":"2025-09-30T13:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.740636 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.740684 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.740693 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.740706 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.740714 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:09Z","lastTransitionTime":"2025-09-30T13:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.841972 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:09 crc kubenswrapper[4783]: E0930 13:36:09.842077 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.842246 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:09 crc kubenswrapper[4783]: E0930 13:36:09.842296 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.842396 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:09 crc kubenswrapper[4783]: E0930 13:36:09.842450 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.842706 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:09 crc kubenswrapper[4783]: E0930 13:36:09.842819 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.842922 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.842937 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.842944 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.842955 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.842963 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:09Z","lastTransitionTime":"2025-09-30T13:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.945061 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.945104 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.945116 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.945136 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:09 crc kubenswrapper[4783]: I0930 13:36:09.945152 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:09Z","lastTransitionTime":"2025-09-30T13:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.047041 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.047082 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.047095 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.047113 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.047125 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:10Z","lastTransitionTime":"2025-09-30T13:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.151311 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.151377 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.151390 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.151450 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.151482 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:10Z","lastTransitionTime":"2025-09-30T13:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.254134 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.254167 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.254175 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.254188 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.254196 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:10Z","lastTransitionTime":"2025-09-30T13:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.355990 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.356035 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.356043 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.356057 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.356067 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:10Z","lastTransitionTime":"2025-09-30T13:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.459000 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.459070 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.459086 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.459100 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.459109 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:10Z","lastTransitionTime":"2025-09-30T13:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.561498 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.561540 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.561553 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.561571 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.561582 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:10Z","lastTransitionTime":"2025-09-30T13:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.663788 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.663831 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.663840 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.663854 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.663864 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:10Z","lastTransitionTime":"2025-09-30T13:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.766025 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.766063 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.766073 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.766088 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.766100 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:10Z","lastTransitionTime":"2025-09-30T13:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.854559 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:10Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.865291 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:10Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.868284 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.868384 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.868397 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.868419 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.868431 4783 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:10Z","lastTransitionTime":"2025-09-30T13:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.881108 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:10Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.903606 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:10Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.914091 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6677cf0-3613-4d5b-aad8-facb0b696402\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25232655bbb35fd9585d250a134dec02d17754eebc513502e0344820bf93210b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e60989335a4f5df01665b18ae6ede533d576de70e6ddcfa28e59f83400055051\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://026217b82bd8ae03e644f40984148495e3b4ce9d342008e6fe3070d2d0d7db10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:10Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.932399 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:10Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.943757 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:10Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.954368 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:10Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.967470 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:10Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.975404 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.976835 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.976852 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.976893 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.977093 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:10Z","lastTransitionTime":"2025-09-30T13:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:10 crc kubenswrapper[4783]: I0930 13:36:10.992203 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:10Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.019334 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/r
un/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"message\\\":\\\".EgressIP event handler 8 for removal\\\\nI0930 13:35:55.756192 6453 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 13:35:55.756206 6453 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 13:35:55.756254 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:55.756268 6453 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:55.756289 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 13:35:55.756297 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 13:35:55.756347 6453 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:55.756354 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 13:35:55.756361 6453 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 13:35:55.756371 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 13:35:55.756407 6453 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 13:35:55.756432 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 13:35:55.756415 6453 factory.go:656] Stopping watch factory\\\\nI0930 13:35:55.756480 6453 ovnkube.go:599] Stopped 
ovnkube\\\\nI0930 13:35:55.756510 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 13:35:55.756632 6453 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overr
ides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:11Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.035390 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:11Z is after 2025-08-24T17:21:41Z" Sep 30 
13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.052288 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:11Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.067859 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:11Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.079208 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:11Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.080506 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.080615 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.081263 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.081369 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.081449 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:11Z","lastTransitionTime":"2025-09-30T13:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.094971 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:11Z is after 2025-08-24T17:21:41Z"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.106001 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:11Z is after 2025-08-24T17:21:41Z"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.184648 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.185132 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.185341 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.185696 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.186096 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:11Z","lastTransitionTime":"2025-09-30T13:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.289421 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.289466 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.289482 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.289502 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.289520 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:11Z","lastTransitionTime":"2025-09-30T13:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.391987 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.392071 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.392087 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.392108 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.392124 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:11Z","lastTransitionTime":"2025-09-30T13:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.495036 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.495333 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.495569 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.495711 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.495843 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:11Z","lastTransitionTime":"2025-09-30T13:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.598395 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.599388 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.599467 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.599545 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.599679 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:11Z","lastTransitionTime":"2025-09-30T13:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.702507 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.702586 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.702603 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.702660 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.702686 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:11Z","lastTransitionTime":"2025-09-30T13:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.805901 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.806404 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.806621 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.806804 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.806966 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:11Z","lastTransitionTime":"2025-09-30T13:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.842658 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.842703 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:36:11 crc kubenswrapper[4783]: E0930 13:36:11.842976 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.842745 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.842703 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:36:11 crc kubenswrapper[4783]: E0930 13:36:11.843175 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 13:36:11 crc kubenswrapper[4783]: E0930 13:36:11.843659 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4"
Sep 30 13:36:11 crc kubenswrapper[4783]: E0930 13:36:11.843550 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.910322 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.910501 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.910523 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.910554 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.910573 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:11Z","lastTransitionTime":"2025-09-30T13:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:11 crc kubenswrapper[4783]: I0930 13:36:11.946540 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs\") pod \"network-metrics-daemon-k69sq\" (UID: \"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\") " pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:36:11 crc kubenswrapper[4783]: E0930 13:36:11.946747 4783 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 13:36:11 crc kubenswrapper[4783]: E0930 13:36:11.947058 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs podName:700fd549-bf4a-4e30-9e2c-efdb039a7ac4 nodeName:}" failed. No retries permitted until 2025-09-30 13:36:43.947025283 +0000 UTC m=+103.878491630 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs") pod "network-metrics-daemon-k69sq" (UID: "700fd549-bf4a-4e30-9e2c-efdb039a7ac4") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.013970 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.014040 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.014059 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.014085 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.014103 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:12Z","lastTransitionTime":"2025-09-30T13:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.117123 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.117185 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.117205 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.117237 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.117251 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:12Z","lastTransitionTime":"2025-09-30T13:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.219741 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.220063 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.220286 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.220433 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.220569 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:12Z","lastTransitionTime":"2025-09-30T13:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.322765 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.322814 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.322830 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.322853 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.322871 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:12Z","lastTransitionTime":"2025-09-30T13:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.425625 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.425685 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.425703 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.425725 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.425741 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:12Z","lastTransitionTime":"2025-09-30T13:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.528252 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.528316 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.528332 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.528355 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.528371 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:12Z","lastTransitionTime":"2025-09-30T13:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.630492 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.630557 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.630574 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.630603 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.630620 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:12Z","lastTransitionTime":"2025-09-30T13:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.733615 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.733888 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.733984 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.734078 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.734171 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:12Z","lastTransitionTime":"2025-09-30T13:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.836203 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.836415 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.836498 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.836571 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.836648 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:12Z","lastTransitionTime":"2025-09-30T13:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.940328 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.940367 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.940378 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.940393 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:12 crc kubenswrapper[4783]: I0930 13:36:12.940404 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:12Z","lastTransitionTime":"2025-09-30T13:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.044549 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.044627 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.044646 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.044668 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.044685 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:13Z","lastTransitionTime":"2025-09-30T13:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.147421 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.147472 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.147484 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.147501 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.147513 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:13Z","lastTransitionTime":"2025-09-30T13:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.250360 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.250420 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.250437 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.250459 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.250477 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:13Z","lastTransitionTime":"2025-09-30T13:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.353297 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.353349 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.353366 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.353389 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.353406 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:13Z","lastTransitionTime":"2025-09-30T13:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.455993 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.456355 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.456372 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.456396 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.456413 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:13Z","lastTransitionTime":"2025-09-30T13:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.559357 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.559409 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.559439 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.559460 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.559473 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:13Z","lastTransitionTime":"2025-09-30T13:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.661098 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.661670 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.661755 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.661817 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.661877 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:13Z","lastTransitionTime":"2025-09-30T13:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.765070 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.765110 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.765125 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.765140 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.765151 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:13Z","lastTransitionTime":"2025-09-30T13:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.842986 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.843022 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:36:13 crc kubenswrapper[4783]: E0930 13:36:13.843688 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.843097 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.843043 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:36:13 crc kubenswrapper[4783]: E0930 13:36:13.843857 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4"
Sep 30 13:36:13 crc kubenswrapper[4783]: E0930 13:36:13.844317 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:36:13 crc kubenswrapper[4783]: E0930 13:36:13.844556 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.866980 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.867010 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.867037 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.867052 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.867060 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:13Z","lastTransitionTime":"2025-09-30T13:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.970016 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.970094 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.970117 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.970144 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:13 crc kubenswrapper[4783]: I0930 13:36:13.970166 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:13Z","lastTransitionTime":"2025-09-30T13:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.072744 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.072823 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.072836 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.072850 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.072859 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:14Z","lastTransitionTime":"2025-09-30T13:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.175665 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.175707 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.175718 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.175735 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.175747 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:14Z","lastTransitionTime":"2025-09-30T13:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.277754 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.277818 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.277830 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.277846 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.277856 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:14Z","lastTransitionTime":"2025-09-30T13:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.282509 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pmr9_e4186982-08f1-4809-be4f-25f86353ccf1/kube-multus/0.log"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.282587 4783 generic.go:334] "Generic (PLEG): container finished" podID="e4186982-08f1-4809-be4f-25f86353ccf1" containerID="db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044" exitCode=1
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.282620 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pmr9" event={"ID":"e4186982-08f1-4809-be4f-25f86353ccf1","Type":"ContainerDied","Data":"db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044"}
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.283118 4783 scope.go:117] "RemoveContainer" containerID="db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.296815 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.308842 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:36:13Z\\\",\\\"message\\\":\\\"2025-09-30T13:35:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1707fa70-dd03-4e11-8e10-52946206c82b\\\\n2025-09-30T13:35:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1707fa70-dd03-4e11-8e10-52946206c82b to /host/opt/cni/bin/\\\\n2025-09-30T13:35:28Z [verbose] multus-daemon started\\\\n2025-09-30T13:35:28Z [verbose] Readiness Indicator file check\\\\n2025-09-30T13:36:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z"
Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.326235 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"message\\\":\\\".EgressIP event handler 8 for removal\\\\nI0930 13:35:55.756192 6453 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 13:35:55.756206 6453 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 13:35:55.756254 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:55.756268 6453 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:55.756289 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 13:35:55.756297 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 13:35:55.756347 6453 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:55.756354 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 13:35:55.756361 6453 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 13:35:55.756371 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 13:35:55.756407 6453 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 13:35:55.756432 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 13:35:55.756415 6453 factory.go:656] Stopping watch factory\\\\nI0930 13:35:55.756480 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0930 13:35:55.756510 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 13:35:55.756632 6453 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.343589 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.357911 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.369387 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.380876 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.381092 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.381192 4783 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.381316 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.381398 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:14Z","lastTransitionTime":"2025-09-30T13:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.382144 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.393254 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.407395 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.419416 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.431768 4783 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.443831 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.458622 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.468659 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6677cf0-3613-4d5b-aad8-facb0b696402\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25232655bbb35fd9585d250a134dec02d17754eebc513502e0344820bf93210b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e60989335a4f5df01665b18ae6ede533d576de70e6ddcfa28e59f83400055051\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://026217b82bd8ae03e644f40984148495e3b4ce9d342008e6fe3070d2d0d7db10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.480669 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.484685 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.484721 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 
13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.484732 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.484748 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.484759 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:14Z","lastTransitionTime":"2025-09-30T13:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.494812 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 
2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.504571 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:14Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.586675 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.586701 4783 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.586710 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.586722 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.586730 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:14Z","lastTransitionTime":"2025-09-30T13:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.689059 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.689092 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.689102 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.689117 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.689126 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:14Z","lastTransitionTime":"2025-09-30T13:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.792531 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.792566 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.792600 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.792618 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.792629 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:14Z","lastTransitionTime":"2025-09-30T13:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.894988 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.895289 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.895377 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.895466 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.895572 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:14Z","lastTransitionTime":"2025-09-30T13:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.997679 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.997714 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.997726 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.997746 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:14 crc kubenswrapper[4783]: I0930 13:36:14.997761 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:14Z","lastTransitionTime":"2025-09-30T13:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.100496 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.100603 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.100627 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.100683 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.100706 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:15Z","lastTransitionTime":"2025-09-30T13:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.204214 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.204318 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.204345 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.204377 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.204400 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:15Z","lastTransitionTime":"2025-09-30T13:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.288612 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pmr9_e4186982-08f1-4809-be4f-25f86353ccf1/kube-multus/0.log" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.288726 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pmr9" event={"ID":"e4186982-08f1-4809-be4f-25f86353ccf1","Type":"ContainerStarted","Data":"70cf26cf8fb9a2eb04fa746718b72199e2dfe8b11a074f145579b0bb58652ef4"} Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.308684 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.311717 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.311760 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.311777 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.311798 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.311814 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:15Z","lastTransitionTime":"2025-09-30T13:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.326796 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.346218 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.364352 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.388577 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 
2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.406988 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.421172 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.421268 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.421297 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.421551 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.421589 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:15Z","lastTransitionTime":"2025-09-30T13:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.429534 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6677cf0-3613-4d5b-aad8-facb0b696402\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25232655bbb35fd9585d250a134dec02d17754eebc513502e0344820bf93210b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e60989335a4f5df01665b18ae6ede533d576de70e6ddcfa28e59f83400055051\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440
c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://026217b82bd8ae03e644f40984148495e3b4ce9d342008e6fe3070d2d0d7db10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.449419 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.460480 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.475560 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.499113 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 
13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.517563 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.525469 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.525521 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.525539 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.525565 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.525587 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:15Z","lastTransitionTime":"2025-09-30T13:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.537473 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.552560 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.565651 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.582995 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70cf26cf8fb9a2eb04fa746718b72199e2dfe8b11a074f145579b0bb58652ef4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:36:13Z\\\",\\\"message\\\":\\\"2025-09-30T13:35:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1707fa70-dd03-4e11-8e10-52946206c82b\\\\n2025-09-30T13:35:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1707fa70-dd03-4e11-8e10-52946206c82b to /host/opt/cni/bin/\\\\n2025-09-30T13:35:28Z [verbose] multus-daemon started\\\\n2025-09-30T13:35:28Z [verbose] Readiness Indicator file check\\\\n2025-09-30T13:36:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:36:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.602848 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"message\\\":\\\".EgressIP event handler 8 for removal\\\\nI0930 13:35:55.756192 6453 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 13:35:55.756206 6453 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 13:35:55.756254 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:55.756268 6453 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:55.756289 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 13:35:55.756297 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 13:35:55.756347 6453 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:55.756354 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 13:35:55.756361 6453 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 13:35:55.756371 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 13:35:55.756407 6453 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 13:35:55.756432 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 13:35:55.756415 6453 factory.go:656] Stopping watch factory\\\\nI0930 13:35:55.756480 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0930 13:35:55.756510 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 13:35:55.756632 6453 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:15Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.627975 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.628009 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.628019 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.628034 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.628045 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:15Z","lastTransitionTime":"2025-09-30T13:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.731567 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.731624 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.731642 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.731666 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.731684 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:15Z","lastTransitionTime":"2025-09-30T13:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.836037 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.836104 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.836123 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.836147 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.836164 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:15Z","lastTransitionTime":"2025-09-30T13:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.842820 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.842865 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.842933 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:15 crc kubenswrapper[4783]: E0930 13:36:15.843074 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.843134 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:15 crc kubenswrapper[4783]: E0930 13:36:15.843293 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:15 crc kubenswrapper[4783]: E0930 13:36:15.843365 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:15 crc kubenswrapper[4783]: E0930 13:36:15.843525 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.939371 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.939449 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.939470 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.939496 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:15 crc kubenswrapper[4783]: I0930 13:36:15.939518 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:15Z","lastTransitionTime":"2025-09-30T13:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.048758 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.048839 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.048862 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.048891 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.048908 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:16Z","lastTransitionTime":"2025-09-30T13:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.152382 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.152450 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.152465 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.152484 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.152503 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:16Z","lastTransitionTime":"2025-09-30T13:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.255379 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.255435 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.255454 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.255478 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.255496 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:16Z","lastTransitionTime":"2025-09-30T13:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.358136 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.358179 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.358191 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.358207 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.358237 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:16Z","lastTransitionTime":"2025-09-30T13:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.460983 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.461017 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.461026 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.461057 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.461065 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:16Z","lastTransitionTime":"2025-09-30T13:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.563843 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.563894 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.563908 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.563925 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.563937 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:16Z","lastTransitionTime":"2025-09-30T13:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.667411 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.667464 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.667481 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.667503 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.667521 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:16Z","lastTransitionTime":"2025-09-30T13:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.770909 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.770963 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.770980 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.771027 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.771044 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:16Z","lastTransitionTime":"2025-09-30T13:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.873541 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.873592 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.873605 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.873626 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.873637 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:16Z","lastTransitionTime":"2025-09-30T13:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.977127 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.977181 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.977201 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.977267 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:16 crc kubenswrapper[4783]: I0930 13:36:16.977305 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:16Z","lastTransitionTime":"2025-09-30T13:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.080531 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.080587 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.080604 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.080626 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.080646 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:17Z","lastTransitionTime":"2025-09-30T13:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.183400 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.183458 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.183476 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.183500 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.183517 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:17Z","lastTransitionTime":"2025-09-30T13:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.286620 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.286706 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.286724 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.286746 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.286764 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:17Z","lastTransitionTime":"2025-09-30T13:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.389895 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.389944 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.389961 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.389983 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.389998 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:17Z","lastTransitionTime":"2025-09-30T13:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.497512 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.497579 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.497598 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.497625 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.497648 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:17Z","lastTransitionTime":"2025-09-30T13:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.602205 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.602341 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.602364 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.602482 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.602518 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:17Z","lastTransitionTime":"2025-09-30T13:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.706386 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.706470 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.706490 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.706516 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.706533 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:17Z","lastTransitionTime":"2025-09-30T13:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.810113 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.810187 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.810205 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.810257 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.810275 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:17Z","lastTransitionTime":"2025-09-30T13:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.842838 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:17 crc kubenswrapper[4783]: E0930 13:36:17.843032 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.843104 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.843185 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:17 crc kubenswrapper[4783]: E0930 13:36:17.843325 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.842838 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:17 crc kubenswrapper[4783]: E0930 13:36:17.843508 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:17 crc kubenswrapper[4783]: E0930 13:36:17.843701 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.912732 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.912790 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.912808 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.912831 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:17 crc kubenswrapper[4783]: I0930 13:36:17.912849 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:17Z","lastTransitionTime":"2025-09-30T13:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.016058 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.016142 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.016163 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.016187 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.016206 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:18Z","lastTransitionTime":"2025-09-30T13:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.118957 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.119014 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.119050 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.119083 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.119107 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:18Z","lastTransitionTime":"2025-09-30T13:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.222504 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.222567 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.222584 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.222606 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.222624 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:18Z","lastTransitionTime":"2025-09-30T13:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.327847 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.327921 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.327944 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.327972 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.327997 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:18Z","lastTransitionTime":"2025-09-30T13:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.430766 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.430813 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.430825 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.430841 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.430854 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:18Z","lastTransitionTime":"2025-09-30T13:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.533847 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.533901 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.533920 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.533945 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.533963 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:18Z","lastTransitionTime":"2025-09-30T13:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.636598 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.636656 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.636673 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.636697 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.636714 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:18Z","lastTransitionTime":"2025-09-30T13:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.740330 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.740403 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.740424 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.740448 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.740469 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:18Z","lastTransitionTime":"2025-09-30T13:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.843544 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.843609 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.843632 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.843661 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.843682 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:18Z","lastTransitionTime":"2025-09-30T13:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.843910 4783 scope.go:117] "RemoveContainer" containerID="8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.945852 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.945880 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.945888 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.945901 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:18 crc kubenswrapper[4783]: I0930 13:36:18.945910 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:18Z","lastTransitionTime":"2025-09-30T13:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.048744 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.048859 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.048883 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.048907 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.048962 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:19Z","lastTransitionTime":"2025-09-30T13:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.134693 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.134737 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.134756 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.134776 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.134791 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:19Z","lastTransitionTime":"2025-09-30T13:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:19 crc kubenswrapper[4783]: E0930 13:36:19.156116 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.160764 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.160804 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.160821 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.160843 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.160858 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:19Z","lastTransitionTime":"2025-09-30T13:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:19 crc kubenswrapper[4783]: E0930 13:36:19.180900 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.187555 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.187625 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.187650 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.187679 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.187701 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:19Z","lastTransitionTime":"2025-09-30T13:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:19 crc kubenswrapper[4783]: E0930 13:36:19.208165 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.215507 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.215591 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.215609 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.216381 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.216479 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:19Z","lastTransitionTime":"2025-09-30T13:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:19 crc kubenswrapper[4783]: E0930 13:36:19.235532 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.240415 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.240525 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.240540 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.240557 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.240570 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:19Z","lastTransitionTime":"2025-09-30T13:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:19 crc kubenswrapper[4783]: E0930 13:36:19.259151 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: E0930 13:36:19.259538 4783 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.262403 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.262456 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.262481 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.262512 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.262537 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:19Z","lastTransitionTime":"2025-09-30T13:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.303955 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/2.log" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.305725 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerStarted","Data":"9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6"} Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.306053 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.319637 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.335613 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.351334 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.361362 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.364126 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.364165 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.364175 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.364189 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.364198 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:19Z","lastTransitionTime":"2025-09-30T13:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.372469 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.386656 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.396949 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6677cf0-3613-4d5b-aad8-facb0b696402\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25232655bbb35fd9585d250a134dec02d17754eebc513502e0344820bf93210b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e60989335a4f5df01665b18ae6ede533d576de70e6ddcfa28e59f83400055051\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://026217b82bd8ae03e644f40984148495e3b4ce9d342008e6fe3070d2d0d7db10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.413578 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.424101 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.436401 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.446514 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.456676 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70cf26cf8fb9a2eb04fa746718b72199e2dfe8b11a074f145579b0bb58652ef4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:36:13Z\\\",\\\"message\\\":\\\"2025-09-30T13:35:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1707fa70-dd03-4e11-8e10-52946206c82b\\\\n2025-09-30T13:35:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1707fa70-dd03-4e11-8e10-52946206c82b to /host/opt/cni/bin/\\\\n2025-09-30T13:35:28Z [verbose] multus-daemon started\\\\n2025-09-30T13:35:28Z [verbose] Readiness Indicator file check\\\\n2025-09-30T13:36:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:36:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.466830 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.466864 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.466873 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.466885 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.466894 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:19Z","lastTransitionTime":"2025-09-30T13:36:19Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.480817 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"message\\\":\\\".EgressIP event handler 8 for removal\\\\nI0930 13:35:55.756192 6453 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 13:35:55.756206 6453 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 13:35:55.756254 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:55.756268 6453 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:55.756289 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 13:35:55.756297 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 13:35:55.756347 6453 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:55.756354 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 13:35:55.756361 6453 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 13:35:55.756371 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 13:35:55.756407 6453 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 13:35:55.756432 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 13:35:55.756415 6453 factory.go:656] Stopping watch factory\\\\nI0930 13:35:55.756480 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0930 13:35:55.756510 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 13:35:55.756632 6453 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:36:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.490527 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 
13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.501858 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.513449 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.524190 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.569148 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.569190 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.569200 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.569214 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.569250 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:19Z","lastTransitionTime":"2025-09-30T13:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.672023 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.672067 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.672076 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.672090 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.672098 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:19Z","lastTransitionTime":"2025-09-30T13:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.775066 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.775150 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.775172 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.775211 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.775265 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:19Z","lastTransitionTime":"2025-09-30T13:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.842073 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.842177 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.842252 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.842368 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:19 crc kubenswrapper[4783]: E0930 13:36:19.842433 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:19 crc kubenswrapper[4783]: E0930 13:36:19.842579 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:19 crc kubenswrapper[4783]: E0930 13:36:19.842767 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:19 crc kubenswrapper[4783]: E0930 13:36:19.842819 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.877616 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.877670 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.877688 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.877710 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.877727 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:19Z","lastTransitionTime":"2025-09-30T13:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.981169 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.981261 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.981280 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.981309 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:19 crc kubenswrapper[4783]: I0930 13:36:19.981335 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:19Z","lastTransitionTime":"2025-09-30T13:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.083827 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.083882 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.083900 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.083922 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.083938 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:20Z","lastTransitionTime":"2025-09-30T13:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.187930 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.187991 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.188004 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.188031 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.188069 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:20Z","lastTransitionTime":"2025-09-30T13:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.292146 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.292270 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.292304 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.292335 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.292357 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:20Z","lastTransitionTime":"2025-09-30T13:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.312083 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/3.log" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.312777 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/2.log" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.316609 4783 generic.go:334] "Generic (PLEG): container finished" podID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerID="9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6" exitCode=1 Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.316653 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerDied","Data":"9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6"} Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.316776 4783 scope.go:117] "RemoveContainer" containerID="8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.317441 4783 scope.go:117] "RemoveContainer" containerID="9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6" Sep 30 13:36:20 crc kubenswrapper[4783]: E0930 13:36:20.317630 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.340507 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.353579 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.370002 4783 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.383292 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.393398 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.395876 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.395909 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.395918 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.395931 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.395939 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:20Z","lastTransitionTime":"2025-09-30T13:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.404441 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6677cf0-3613-4d5b-aad8-facb0b696402\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25232655bbb35fd9585d250a134dec02d17754eebc513502e0344820bf93210b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e60989335a4f5df01665b18ae6ede533d576de70e6ddcfa28e59f83400055051\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://026217b82bd8ae03e644f40984148495e3b4ce9d342008e6fe3070d2d0d7db10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.421437 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e
7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.435852 4783 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.448362 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.458504 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.477439 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70cf26cf8fb9a2eb04fa746718b72199e2dfe8b11a074f145579b0bb58652ef4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:36:13Z\\\",\\\"message\\\":\\\"2025-09-30T13:35:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1707fa70-dd03-4e11-8e10-52946206c82b\\\\n2025-09-30T13:35:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1707fa70-dd03-4e11-8e10-52946206c82b to /host/opt/cni/bin/\\\\n2025-09-30T13:35:28Z [verbose] multus-daemon started\\\\n2025-09-30T13:35:28Z [verbose] Readiness Indicator file check\\\\n2025-09-30T13:36:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:36:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.498407 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"message\\\":\\\".EgressIP event handler 8 for removal\\\\nI0930 13:35:55.756192 6453 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 13:35:55.756206 6453 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 13:35:55.756254 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:55.756268 6453 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:55.756289 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 13:35:55.756297 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 13:35:55.756347 6453 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:55.756354 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 13:35:55.756361 6453 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 13:35:55.756371 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 13:35:55.756407 6453 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 13:35:55.756432 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 13:35:55.756415 6453 factory.go:656] Stopping watch factory\\\\nI0930 13:35:55.756480 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0930 13:35:55.756510 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 13:35:55.756632 6453 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:36:20Z\\\",\\\"message\\\":\\\"ner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 13:36:19.743730 6821 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added 
to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z]\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:36:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-
overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.498817 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.498852 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.498868 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.498888 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.498902 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:20Z","lastTransitionTime":"2025-09-30T13:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.511824 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.525599 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.543849 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.555957 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.569524 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.601858 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.601909 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.601926 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.601952 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.601969 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:20Z","lastTransitionTime":"2025-09-30T13:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.704825 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.704859 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.704866 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.704879 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.704888 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:20Z","lastTransitionTime":"2025-09-30T13:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.807006 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.807066 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.807090 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.807119 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.807140 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:20Z","lastTransitionTime":"2025-09-30T13:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.857883 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.858433 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[
{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.873505 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.890948 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6677cf0-3613-4d5b-aad8-facb0b696402\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25232655bbb35fd9585d250a134dec02d17754eebc513502e0344820bf93210b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e60989335a4f5df01665b18ae6ede533d576de70e6ddcfa28e59f83400055051\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://026217b82bd8ae03e644f40984148495e3b4ce9d342008e6fe3070d2d0d7db10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runnin
g\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.910500 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.910553 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.910566 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.910581 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.910593 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:20Z","lastTransitionTime":"2025-09-30T13:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.911119 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.926433 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.939333 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.950521 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.967152 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70cf26cf8fb9a2eb04fa746718b72199e2dfe8b11a074f145579b0bb58652ef4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:36:13Z\\\",\\\"message\\\":\\\"2025-09-30T13:35:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1707fa70-dd03-4e11-8e10-52946206c82b\\\\n2025-09-30T13:35:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1707fa70-dd03-4e11-8e10-52946206c82b to /host/opt/cni/bin/\\\\n2025-09-30T13:35:28Z [verbose] multus-daemon started\\\\n2025-09-30T13:35:28Z [verbose] Readiness Indicator file check\\\\n2025-09-30T13:36:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:36:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:20 crc kubenswrapper[4783]: I0930 13:36:20.984642 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c8d7fd5132a5052526307e4bac5da9914674ec53ba6b422fe7d350edbae7a8f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"message\\\":\\\".EgressIP event handler 8 for removal\\\\nI0930 13:35:55.756192 6453 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 13:35:55.756206 6453 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 13:35:55.756254 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 13:35:55.756268 6453 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 13:35:55.756289 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 13:35:55.756297 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 13:35:55.756347 6453 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 13:35:55.756354 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 13:35:55.756361 6453 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 13:35:55.756371 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 13:35:55.756407 6453 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 13:35:55.756432 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 13:35:55.756415 6453 factory.go:656] Stopping watch factory\\\\nI0930 13:35:55.756480 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0930 13:35:55.756510 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0930 13:35:55.756632 6453 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:36:20Z\\\",\\\"message\\\":\\\"ner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 13:36:19.743730 6821 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added 
to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z]\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:36:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-
overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:20Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.002936 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 
13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.013179 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.013238 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.013254 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.013276 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.013291 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:21Z","lastTransitionTime":"2025-09-30T13:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.018144 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 
13:36:21.034320 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"
containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.050891 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.069538 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.091144 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.107129 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.118829 4783 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.118894 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.118911 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.118934 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.118951 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:21Z","lastTransitionTime":"2025-09-30T13:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.127008 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.220981 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.221018 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.221027 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.221039 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.221047 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:21Z","lastTransitionTime":"2025-09-30T13:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.324453 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.324548 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.324560 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.324471 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/3.log" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.324583 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.324682 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:21Z","lastTransitionTime":"2025-09-30T13:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.328947 4783 scope.go:117] "RemoveContainer" containerID="9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6" Sep 30 13:36:21 crc kubenswrapper[4783]: E0930 13:36:21.329185 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.341342 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for 
pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.353271 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6677cf0-3613-4d5b-aad8-facb0b696402\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25232655bbb35fd9585d250a134dec02d17754eebc513502e0344820bf93210b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e60989335a4f5df01665b18ae6ede533d576de70e6ddcfa28e59f83400055051\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://026217b82bd8ae03e644f40984148495e3b4ce9d342008e6fe3070d2d0d7db10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"
ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.369415 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.379133 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.391084 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.401458 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.419493 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70cf26cf8fb9a2eb04fa746718b72199e2dfe8b11a074f145579b0bb58652ef4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:36:13Z\\\",\\\"message\\\":\\\"2025-09-30T13:35:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1707fa70-dd03-4e11-8e10-52946206c82b\\\\n2025-09-30T13:35:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1707fa70-dd03-4e11-8e10-52946206c82b to /host/opt/cni/bin/\\\\n2025-09-30T13:35:28Z [verbose] multus-daemon started\\\\n2025-09-30T13:35:28Z [verbose] Readiness Indicator file check\\\\n2025-09-30T13:36:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:36:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.427076 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.427120 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.427133 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.427150 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.427161 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:21Z","lastTransitionTime":"2025-09-30T13:36:21Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.442630 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:36:20Z\\\",\\\"message\\\":\\\"ner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 13:36:19.743730 6821 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 
2025-08-24T17:21:41Z]\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:36:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":
true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.454540 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 
13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.465824 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.476951 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.489831 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.503241 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.520123 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.531152 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.531196 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.531208 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.531241 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.531254 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:21Z","lastTransitionTime":"2025-09-30T13:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.533389 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.542962 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c9dc0f0-f864-4694-85e0-36831b03a073\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0babf244c6e52dd22c0ff4cac80c59e3648465b07b01fb3efa928bd51e420f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37436153a3c4bb50bf763a76ddc19682010ab9cfe3ad6effb58229f0a7bebc87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37436153a3c4bb50bf763a76ddc19682010ab9cfe3ad6effb58229f0a7bebc87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or 
is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.555376 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.569840 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:21Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.633813 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.633849 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.633857 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.633871 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.633883 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:21Z","lastTransitionTime":"2025-09-30T13:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.737142 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.737204 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.737255 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.737282 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.737300 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:21Z","lastTransitionTime":"2025-09-30T13:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.840867 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.840998 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.841078 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.841113 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.841183 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:21Z","lastTransitionTime":"2025-09-30T13:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.842596 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.842971 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.842833 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.842878 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:21 crc kubenswrapper[4783]: E0930 13:36:21.843095 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:21 crc kubenswrapper[4783]: E0930 13:36:21.843206 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:21 crc kubenswrapper[4783]: E0930 13:36:21.843344 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:21 crc kubenswrapper[4783]: E0930 13:36:21.843446 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.943914 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.943995 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.944014 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.944038 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:21 crc kubenswrapper[4783]: I0930 13:36:21.944056 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:21Z","lastTransitionTime":"2025-09-30T13:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.047140 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.047198 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.047216 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.047265 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.047284 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:22Z","lastTransitionTime":"2025-09-30T13:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.150450 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.150522 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.150547 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.150575 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.150596 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:22Z","lastTransitionTime":"2025-09-30T13:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.253367 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.253434 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.253451 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.253478 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.253501 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:22Z","lastTransitionTime":"2025-09-30T13:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.356400 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.356467 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.356484 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.356512 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.356530 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:22Z","lastTransitionTime":"2025-09-30T13:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.459085 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.459141 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.459158 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.459180 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.459196 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:22Z","lastTransitionTime":"2025-09-30T13:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.562159 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.562250 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.562269 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.562292 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.562309 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:22Z","lastTransitionTime":"2025-09-30T13:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.665436 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.665589 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.665617 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.665645 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.665665 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:22Z","lastTransitionTime":"2025-09-30T13:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.769553 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.769620 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.769633 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.769651 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.769672 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:22Z","lastTransitionTime":"2025-09-30T13:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.872524 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.872625 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.872644 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.872676 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.872708 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:22Z","lastTransitionTime":"2025-09-30T13:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.975680 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.975742 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.975765 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.975792 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:22 crc kubenswrapper[4783]: I0930 13:36:22.975812 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:22Z","lastTransitionTime":"2025-09-30T13:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.078984 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.079534 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.079580 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.079609 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.079630 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:23Z","lastTransitionTime":"2025-09-30T13:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.183601 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.183662 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.183684 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.183712 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.183731 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:23Z","lastTransitionTime":"2025-09-30T13:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.286973 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.287062 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.287080 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.287106 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.287125 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:23Z","lastTransitionTime":"2025-09-30T13:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.406984 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.407052 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.407071 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.407100 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.407118 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:23Z","lastTransitionTime":"2025-09-30T13:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.511770 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.511904 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.511931 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.511963 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.511982 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:23Z","lastTransitionTime":"2025-09-30T13:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.614795 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.614872 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.614892 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.614915 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.614932 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:23Z","lastTransitionTime":"2025-09-30T13:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.718398 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.718464 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.718480 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.718504 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.718524 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:23Z","lastTransitionTime":"2025-09-30T13:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.821845 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.821946 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.821971 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.822004 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.822027 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:23Z","lastTransitionTime":"2025-09-30T13:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.843036 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.843094 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.843155 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.843164 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:23 crc kubenswrapper[4783]: E0930 13:36:23.843324 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:23 crc kubenswrapper[4783]: E0930 13:36:23.843487 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:23 crc kubenswrapper[4783]: E0930 13:36:23.843586 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:23 crc kubenswrapper[4783]: E0930 13:36:23.843646 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.925627 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.925793 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.925858 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.925883 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:23 crc kubenswrapper[4783]: I0930 13:36:23.925903 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:23Z","lastTransitionTime":"2025-09-30T13:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.029305 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.029374 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.029396 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.029423 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.029440 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:24Z","lastTransitionTime":"2025-09-30T13:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.132820 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.132884 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.132901 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.132926 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.132944 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:24Z","lastTransitionTime":"2025-09-30T13:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.236105 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.236184 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.236207 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.236270 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.236295 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:24Z","lastTransitionTime":"2025-09-30T13:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.338877 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.338939 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.338957 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.338980 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.338999 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:24Z","lastTransitionTime":"2025-09-30T13:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.442372 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.442430 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.442447 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.442471 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.442487 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:24Z","lastTransitionTime":"2025-09-30T13:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.545159 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.545272 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.545299 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.545332 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.545355 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:24Z","lastTransitionTime":"2025-09-30T13:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.648364 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.648421 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.648468 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.648493 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.648510 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:24Z","lastTransitionTime":"2025-09-30T13:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.753982 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.754122 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.754150 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.754183 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.754207 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:24Z","lastTransitionTime":"2025-09-30T13:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.857858 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.857988 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.858046 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.858070 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.858129 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:24Z","lastTransitionTime":"2025-09-30T13:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.962006 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.962069 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.962092 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.962120 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:24 crc kubenswrapper[4783]: I0930 13:36:24.962142 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:24Z","lastTransitionTime":"2025-09-30T13:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.065204 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.065381 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.065403 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.065428 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.065445 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:25Z","lastTransitionTime":"2025-09-30T13:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.169355 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.169892 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.170027 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.170258 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.170425 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:25Z","lastTransitionTime":"2025-09-30T13:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.274289 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.274793 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.274938 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.275090 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.275281 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:25Z","lastTransitionTime":"2025-09-30T13:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.378402 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.378905 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.379062 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.379200 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.379407 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:25Z","lastTransitionTime":"2025-09-30T13:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.483096 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.483128 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.483138 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.483163 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.483175 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:25Z","lastTransitionTime":"2025-09-30T13:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.586364 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.586418 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.586449 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.586468 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.586480 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:25Z","lastTransitionTime":"2025-09-30T13:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.689551 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.689632 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.689666 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.689697 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.689721 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:25Z","lastTransitionTime":"2025-09-30T13:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.701124 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.701303 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.701353 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.701320423 +0000 UTC m=+149.632786740 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.701456 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.701464 4783 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.701574 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.701532189 +0000 UTC m=+149.632998506 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.701589 4783 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.701695 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.701659873 +0000 UTC m=+149.633126220 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.792429 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.792490 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.792507 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.792530 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.792546 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:25Z","lastTransitionTime":"2025-09-30T13:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.802949 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.803000 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.803136 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.803156 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.803168 4783 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.803252 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-09-30 13:37:29.803206843 +0000 UTC m=+149.734673160 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.803263 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.803314 4783 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.803333 4783 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.803416 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.803385509 +0000 UTC m=+149.734851856 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.842461 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.842540 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.842464 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.842664 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.842722 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.842836 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.842923 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:25 crc kubenswrapper[4783]: E0930 13:36:25.843000 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.896068 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.896145 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.896168 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.896197 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.896257 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:25Z","lastTransitionTime":"2025-09-30T13:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.999310 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.999384 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.999400 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.999417 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:25 crc kubenswrapper[4783]: I0930 13:36:25.999427 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:25Z","lastTransitionTime":"2025-09-30T13:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.103708 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.103798 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.103826 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.103859 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.103885 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:26Z","lastTransitionTime":"2025-09-30T13:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.207506 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.207570 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.207588 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.207613 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.207634 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:26Z","lastTransitionTime":"2025-09-30T13:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.310456 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.310519 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.310542 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.310570 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.310593 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:26Z","lastTransitionTime":"2025-09-30T13:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.413184 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.413353 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.413374 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.413400 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.413424 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:26Z","lastTransitionTime":"2025-09-30T13:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.517081 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.517157 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.517199 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.517307 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.517336 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:26Z","lastTransitionTime":"2025-09-30T13:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.620476 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.620558 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.620575 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.620605 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.620622 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:26Z","lastTransitionTime":"2025-09-30T13:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.724388 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.724476 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.724494 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.724519 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.724537 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:26Z","lastTransitionTime":"2025-09-30T13:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.826857 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.826958 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.826978 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.827002 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.827017 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:26Z","lastTransitionTime":"2025-09-30T13:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.930087 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.930147 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.930164 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.930185 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:26 crc kubenswrapper[4783]: I0930 13:36:26.930204 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:26Z","lastTransitionTime":"2025-09-30T13:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.033469 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.033537 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.033555 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.033579 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.033597 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:27Z","lastTransitionTime":"2025-09-30T13:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.137137 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.137203 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.137256 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.137285 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.137301 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:27Z","lastTransitionTime":"2025-09-30T13:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.239940 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.240000 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.240017 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.240040 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.240057 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:27Z","lastTransitionTime":"2025-09-30T13:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.343117 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.343187 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.343211 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.343293 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.343316 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:27Z","lastTransitionTime":"2025-09-30T13:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.446084 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.446141 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.446158 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.446180 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.446197 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:27Z","lastTransitionTime":"2025-09-30T13:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.550280 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.550357 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.550383 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.550420 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.550446 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:27Z","lastTransitionTime":"2025-09-30T13:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.654251 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.654330 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.654354 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.654383 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.654403 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:27Z","lastTransitionTime":"2025-09-30T13:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.758020 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.758112 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.758165 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.758247 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.758268 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:27Z","lastTransitionTime":"2025-09-30T13:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.842888 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.842918 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.842999 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.843098 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:27 crc kubenswrapper[4783]: E0930 13:36:27.843334 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:27 crc kubenswrapper[4783]: E0930 13:36:27.843485 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:27 crc kubenswrapper[4783]: E0930 13:36:27.843636 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:27 crc kubenswrapper[4783]: E0930 13:36:27.843742 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.863370 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.863416 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.863434 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.863458 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.863475 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:27Z","lastTransitionTime":"2025-09-30T13:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.966551 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.966626 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.966644 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.966674 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:27 crc kubenswrapper[4783]: I0930 13:36:27.966695 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:27Z","lastTransitionTime":"2025-09-30T13:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.070198 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.070407 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.070434 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.070460 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.070479 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:28Z","lastTransitionTime":"2025-09-30T13:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.173771 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.173898 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.173926 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.173956 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.173976 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:28Z","lastTransitionTime":"2025-09-30T13:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.276826 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.276895 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.276912 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.276943 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.276978 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:28Z","lastTransitionTime":"2025-09-30T13:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.379534 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.379596 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.379615 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.379760 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.379782 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:28Z","lastTransitionTime":"2025-09-30T13:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.482530 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.482595 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.482621 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.482650 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.482675 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:28Z","lastTransitionTime":"2025-09-30T13:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.586108 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.586177 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.586204 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.586279 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.586308 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:28Z","lastTransitionTime":"2025-09-30T13:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.689493 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.689573 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.689597 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.689632 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.689699 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:28Z","lastTransitionTime":"2025-09-30T13:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.793513 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.793648 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.793670 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.793702 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.793728 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:28Z","lastTransitionTime":"2025-09-30T13:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.897490 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.897561 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.897578 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.897604 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:28 crc kubenswrapper[4783]: I0930 13:36:28.897623 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:28Z","lastTransitionTime":"2025-09-30T13:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.001039 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.001098 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.001114 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.001139 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.001157 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.104285 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.104351 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.104377 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.104407 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.104427 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.207390 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.207503 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.207523 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.207550 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.207570 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.310031 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.310157 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.310177 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.310205 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.310264 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.346088 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.346454 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.346547 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.346580 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.346633 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:29 crc kubenswrapper[4783]: E0930 13:36:29.368144 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T13:36:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3fd15990-006c-4695-b2b8-b5f45241b454\\\",\\\"systemUUID\\\":\\\"fe87f595-c6b4-4675-9e9e-56e9408a3611\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:29Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.373426 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.373521 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.373539 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.373562 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.373578 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:29 crc kubenswrapper[4783]: E0930 13:36:29.394117 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{ ... status patch identical to the first attempt above, elided ... }\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:29Z is after 2025-08-24T17:21:41Z"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.399048 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.399142 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
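Every one of these retries fails for the same reason, visible at the tail of each error entry: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a TLS certificate that expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-09-30. A minimal Go sketch of how to confirm this from the host (assumptions: the endpoint is reachable locally; InsecureSkipVerify is set only so the handshake completes and the certificate can be read, not to trust it):

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Handshake with the webhook endpoint seen in the log and dump the
	// validity window of the certificate it presents.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // allow reading the cert even though it is expired
	})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("NotBefore: %s\nNotAfter:  %s\n", cert.NotBefore, cert.NotAfter)
	if time.Now().After(cert.NotAfter) {
		// Matches the log: "x509: certificate has expired or is not yet valid"
		fmt.Println("certificate has expired")
	}
}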
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.399162 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.399212 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.399267 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:29 crc kubenswrapper[4783]: E0930 13:36:29.422951 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{ ... status patch identical to the first attempt above, elided ... }\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:29Z is after 2025-08-24T17:21:41Z"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.428802 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.428861 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.428883 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.428909 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.428928 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:29 crc kubenswrapper[4783]: E0930 13:36:29.450012 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{ ... status patch identical to the first attempt above, elided ... }\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:29Z is after 2025-08-24T17:21:41Z"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.455607 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.455667 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.455688 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.455716 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.455740 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:29 crc kubenswrapper[4783]: E0930 13:36:29.476404 4783 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{ ... status patch identical to the first attempt above, elided ... }\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:29Z is after 2025-08-24T17:21:41Z"
Sep 30 13:36:29 crc kubenswrapper[4783]: E0930 13:36:29.476622 4783 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.479144 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
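Exactly five "will retry" failures precede the "Unable to update node status ... exceeds retry count" entry, matching kubelet's fixed per-sync retry budget (the nodeStatusUpdateRetry constant, 5 in upstream kubelet). A paraphrased sketch of the loop behind the kubelet_node_status.go messages above; the function bodies are illustrative stand-ins, not the upstream source:

package main

import (
	"errors"
	"fmt"
)

// nodeStatusUpdateRetry mirrors the upstream kubelet constant.
const nodeStatusUpdateRetry = 5

// tryUpdateNodeStatus stands in for the status PATCH that the expired
// webhook certificate rejects in the log above.
func tryUpdateNodeStatus(attempt int) error {
	return errors.New("failed to patch status: webhook certificate has expired")
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryUpdateNodeStatus(i); err != nil {
			fmt.Println("Error updating node status, will retry:", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}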
event="NodeHasSufficientMemory" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.479198 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.479247 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.479278 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.479369 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.582202 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.582315 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.582332 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.582355 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.582372 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.685377 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.685429 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.685445 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.685468 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.685487 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.788088 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.788522 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.788539 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.788561 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.788577 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.842047 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.842151 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:29 crc kubenswrapper[4783]: E0930 13:36:29.842214 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:29 crc kubenswrapper[4783]: E0930 13:36:29.842369 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.842484 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:29 crc kubenswrapper[4783]: E0930 13:36:29.842704 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.842891 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:29 crc kubenswrapper[4783]: E0930 13:36:29.843187 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.891106 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.891479 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.891531 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.891561 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.891581 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.994298 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.994347 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.994366 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.994388 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:29 crc kubenswrapper[4783]: I0930 13:36:29.994407 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:29Z","lastTransitionTime":"2025-09-30T13:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.098071 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.098126 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.098143 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.098163 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.098179 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:30Z","lastTransitionTime":"2025-09-30T13:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.200962 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.201022 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.201042 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.201067 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.201085 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:30Z","lastTransitionTime":"2025-09-30T13:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.304720 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.304782 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.304799 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.304822 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.304839 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:30Z","lastTransitionTime":"2025-09-30T13:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.407742 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.407790 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.407799 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.407814 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.407827 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:30Z","lastTransitionTime":"2025-09-30T13:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.511184 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.511295 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.511313 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.511334 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.511350 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:30Z","lastTransitionTime":"2025-09-30T13:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.614327 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.614383 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.614401 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.614424 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.614445 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:30Z","lastTransitionTime":"2025-09-30T13:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.717092 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.717151 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.717169 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.717192 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.717210 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:30Z","lastTransitionTime":"2025-09-30T13:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.819395 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.819464 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.819484 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.819509 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.819526 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:30Z","lastTransitionTime":"2025-09-30T13:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.863897 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c75e9fc-197d-4734-901c-efc548eb2102\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8ac42a67a085c732ba343f4e6beac0e7001e54993ad1813d90628d4e742906b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae56ccd010030365cc0ea39bae1ab68f37bb31850d4997e53f06027ef3ab840c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd9dfc82dedf4416e16a17cbdec282e7133ab184580498503cbc09f52bbf930b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f6b1719f72ff03cea8182efb3ea0fa06f0449f507601cc8ce90a8decbaad319\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.883427 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.898175 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7c9dc0f0-f864-4694-85e0-36831b03a073\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0babf244c6e52dd22c0ff4cac80c59e3648465b07b01fb3efa928bd51e420f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37436153a3c4bb50bf763a76ddc19682010ab9cfe3ad6effb58229f0a7bebc87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37436153a3c4bb50bf763a76ddc19682010ab9cfe3ad6effb5822
9f0a7bebc87\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.918758 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.923098 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.923173 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.923197 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.923255 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.923290 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:30Z","lastTransitionTime":"2025-09-30T13:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.943533 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33acf620a66272df5e8bc2b74cfdc493c89e579f89fca2679c4bd45ec95997a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.969141 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-mxltm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17cdc54d-47d0-41b9-be99-f8293fa63ec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207c3b3c9c7346a6b50ab1945e7be5949a554bd18775ab45ba857436db2c5593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d86fb3d4a018d81a39f0cfec2c43ead1c3289ef3bd75297de853a32b6c322f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c81ad64f06d4d0d3c2f5e919decb1dc336e163bf7e6fb89016269edf4aa62e4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de56ffe80b26f7422a680aa2b473d195a47b284ff87d9b72cba83f29837363d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c36f5aec9d85d553f5c22c5e86727a017086d0b56568835b899a533e6d4b4c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab4a383a8cd90700f890e8974eada49c624308eb415b0c598b7a7d103359794\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ac9a83cfd96bee2d2c811a0cc5a7657257ee6f2e4e09f5c0f8fceb8801802d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6tfvk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-mxltm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:30 crc kubenswrapper[4783]: I0930 13:36:30.987329 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfcc00d799545eb1d43f18170377fa24e8a9ab44b3e980ab5161ed42dc4384f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjh8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-668zf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:30Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.006485 4783 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6677cf0-3613-4d5b-aad8-facb0b696402\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25232655bbb35fd9585d250a134dec02d17754eebc513502e0344820bf93210b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e60989335a4f5df01665b18ae6ede533d576de70e6ddcfa28e59f83400055051\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://026217b82bd8ae03e644f40984148495e3b4ce9d342008e6fe3070d2d0d7db10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a86e154f9d3875c0daf539c701ecc2564fa41006b5ff7e23ee03c6b6e6a12e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.026439 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.026498 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.026515 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.026539 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.026557 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:31Z","lastTransitionTime":"2025-09-30T13:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.028959 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c160d78b-4de5-491c-92a8-089eece1b8b3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0dba21b3fc2efe34289d644ecd94dd57670d0f6007956ec1c31463c915986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64300072c04e59e0fc25d49864e9b9865bd0b681fda1d15eada6e7443ae2c27c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9029255c3b06d3943c7d5b43923708bb5cf9507e6454cd6991856b19befa8cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd6e08a9de2e3cea87cae1cd7cfec9291f35cd6207609aebc95bfc46cced3b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56da0b9ca0a111761872b6782b4bd87f4ad2d8683256f556565de2036e439b2e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"file observer\\\\nW0930 13:35:21.314717 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 13:35:21.314945 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 13:35:21.315963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-333029032/tls.crt::/tmp/serving-cert-333029032/tls.key\\\\\\\"\\\\nI0930 13:35:21.734878 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 13:35:21.737435 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 13:35:21.737453 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 13:35:21.737482 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 13:35:21.737489 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 13:35:21.742935 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 13:35:21.742975 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742981 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 13:35:21.742987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 13:35:21.742991 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 13:35:21.742996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 13:35:21.743000 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 13:35:21.743070 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 13:35:21.744926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e81cd698c131ba0e1f53d8002bc20879b334cf0acc6cc37587a2497a0aa5dbfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cdf57934f1161483122df8d04411f96ab6d930fc6bc053c33cb70d26a0b8fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.045555 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gcx27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53acc368-19fd-4980-a438-1122e2b7c12e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c667c17acdf820647af4823c3c7d0b560368c8448dfb3967e65ae51eb66b367f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfjth\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gcx27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.062338 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k69sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxtnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k69sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.083201 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdfb3764dd40fd24527bc7d140e2478b724ed261c47fc11665a74834be70a219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.104453 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336b751580784d9dc9e707f89a3aff83018db8741859842cac0d4272271dd48a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a13e9c86cd537004e319ac4c7ac8830f8c878e8360a780dd8c8f23e6656727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.123455 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.130086 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.130154 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.130171 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.130195 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.130212 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:31Z","lastTransitionTime":"2025-09-30T13:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.141130 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kszvl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27c2621f-6e49-4cf1-bb9b-b4b77d5fc79e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ddfe1c36b615f89eeb85bafec4eeb7d5dd1b3782f8b3a40ac01a478d679276e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gfwzt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kszvl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.164477 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pmr9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4186982-08f1-4809-be4f-25f86353ccf1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:36:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70cf26cf8fb9a2eb04fa746718b72199e2dfe8b11a074f145579b0bb58652ef4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:36:13Z\\\",\\\"message\\\":\\\"2025-09-30T13:35:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1707fa70-dd03-4e11-8e10-52946206c82b\\\\n2025-09-30T13:35:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1707fa70-dd03-4e11-8e10-52946206c82b to /host/opt/cni/bin/\\\\n2025-09-30T13:35:28Z [verbose] multus-daemon started\\\\n2025-09-30T13:35:28Z [verbose] Readiness Indicator file check\\\\n2025-09-30T13:36:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:36:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xqvlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pmr9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.196147 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T13:36:20Z\\\",\\\"message\\\":\\\"ner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 13:36:19.743730 6821 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:19Z is after 2025-08-24T17:21:41Z]\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T13:36:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T13:35:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T13:35:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-298pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-22xvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.210368 4783 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d144a9a2-3cd9-4ca7-9a18-631efc4ddea2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T13:35:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffc1e668a0db950a6af52d296c998941fac4d7e25fc3b56614a77b2b4c7ca9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c8074a22e8a660d65a63f3e731fb1a8f68e3ca44aac998b80ed8d99a838bbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T13:35:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gm2hm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T13:35:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dq9kl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T13:36:31Z is after 2025-08-24T17:21:41Z" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.232986 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.233045 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.233064 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.233088 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.233105 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:31Z","lastTransitionTime":"2025-09-30T13:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.335931 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.335997 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.336014 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.336035 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.336056 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:31Z","lastTransitionTime":"2025-09-30T13:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.440720 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.440806 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.440830 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.440864 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.440889 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:31Z","lastTransitionTime":"2025-09-30T13:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.544344 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.544457 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.544482 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.544527 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.544555 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:31Z","lastTransitionTime":"2025-09-30T13:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.647200 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.647289 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.647308 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.647332 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.647349 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:31Z","lastTransitionTime":"2025-09-30T13:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.751391 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.751495 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.751518 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.751552 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.751576 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:31Z","lastTransitionTime":"2025-09-30T13:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.842983 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.843035 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.843042 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.843003 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:31 crc kubenswrapper[4783]: E0930 13:36:31.843158 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:31 crc kubenswrapper[4783]: E0930 13:36:31.843279 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:31 crc kubenswrapper[4783]: E0930 13:36:31.843412 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:31 crc kubenswrapper[4783]: E0930 13:36:31.843617 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.854569 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.854629 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.854653 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.854675 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.854692 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:31Z","lastTransitionTime":"2025-09-30T13:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.958031 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.958117 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.958143 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.958172 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:31 crc kubenswrapper[4783]: I0930 13:36:31.958195 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:31Z","lastTransitionTime":"2025-09-30T13:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.061457 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.061535 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.061553 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.061577 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.061594 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:32Z","lastTransitionTime":"2025-09-30T13:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.164056 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.164127 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.164154 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.164184 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.164207 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:32Z","lastTransitionTime":"2025-09-30T13:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.268505 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.268580 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.268602 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.268627 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.268652 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:32Z","lastTransitionTime":"2025-09-30T13:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.370976 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.371043 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.371063 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.371089 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.371109 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:32Z","lastTransitionTime":"2025-09-30T13:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.474320 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.474423 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.474440 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.474466 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.474484 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:32Z","lastTransitionTime":"2025-09-30T13:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.577553 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.577638 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.577662 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.577694 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.577724 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:32Z","lastTransitionTime":"2025-09-30T13:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.680540 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.680612 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.680636 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.680665 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.680686 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:32Z","lastTransitionTime":"2025-09-30T13:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.783284 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.783322 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.783332 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.783348 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.783358 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:32Z","lastTransitionTime":"2025-09-30T13:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.886188 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.886242 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.886254 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.886270 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.886282 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:32Z","lastTransitionTime":"2025-09-30T13:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.989731 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.989782 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.989798 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.989819 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:32 crc kubenswrapper[4783]: I0930 13:36:32.989836 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:32Z","lastTransitionTime":"2025-09-30T13:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.093007 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.093043 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.093053 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.093067 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.093078 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:33Z","lastTransitionTime":"2025-09-30T13:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.195674 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.195763 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.195786 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.195816 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.195837 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:33Z","lastTransitionTime":"2025-09-30T13:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.299454 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.299527 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.299544 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.299569 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.299590 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:33Z","lastTransitionTime":"2025-09-30T13:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.402106 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.402159 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.402172 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.402189 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.402201 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:33Z","lastTransitionTime":"2025-09-30T13:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.504672 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.504731 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.504747 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.504771 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.504790 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:33Z","lastTransitionTime":"2025-09-30T13:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.608135 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.608194 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.608211 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.608271 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.608288 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:33Z","lastTransitionTime":"2025-09-30T13:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.711483 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.711527 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.711545 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.711568 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.711586 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:33Z","lastTransitionTime":"2025-09-30T13:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.814336 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.814407 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.814432 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.814462 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.814482 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:33Z","lastTransitionTime":"2025-09-30T13:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.842536 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.842588 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.842687 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:33 crc kubenswrapper[4783]: E0930 13:36:33.842854 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.842937 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:33 crc kubenswrapper[4783]: E0930 13:36:33.843082 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:33 crc kubenswrapper[4783]: E0930 13:36:33.843167 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:33 crc kubenswrapper[4783]: E0930 13:36:33.843312 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.917055 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.917129 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.917151 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.917185 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:33 crc kubenswrapper[4783]: I0930 13:36:33.917206 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:33Z","lastTransitionTime":"2025-09-30T13:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.020336 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.020390 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.020408 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.020436 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.020453 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:34Z","lastTransitionTime":"2025-09-30T13:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.122798 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.122861 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.122879 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.122905 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.122923 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:34Z","lastTransitionTime":"2025-09-30T13:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.225058 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.225161 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.225181 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.225204 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.225249 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:34Z","lastTransitionTime":"2025-09-30T13:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.328755 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.328818 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.328835 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.328863 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.328888 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:34Z","lastTransitionTime":"2025-09-30T13:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.440011 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.440086 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.440105 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.440136 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.440158 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:34Z","lastTransitionTime":"2025-09-30T13:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.543877 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.543963 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.543983 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.544025 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.544064 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:34Z","lastTransitionTime":"2025-09-30T13:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.647595 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.647661 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.647683 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.647713 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.647734 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:34Z","lastTransitionTime":"2025-09-30T13:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.751065 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.751127 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.751142 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.751164 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.751180 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:34Z","lastTransitionTime":"2025-09-30T13:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.853137 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.853198 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.853215 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.853275 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.853294 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:34Z","lastTransitionTime":"2025-09-30T13:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.957054 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.957128 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.957148 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.957176 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:34 crc kubenswrapper[4783]: I0930 13:36:34.957197 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:34Z","lastTransitionTime":"2025-09-30T13:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.061040 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.061119 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.061143 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.061173 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.061194 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:35Z","lastTransitionTime":"2025-09-30T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.164675 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.164732 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.164751 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.164785 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.164818 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:35Z","lastTransitionTime":"2025-09-30T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.268183 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.268270 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.268289 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.268311 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.268329 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:35Z","lastTransitionTime":"2025-09-30T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.372029 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.372114 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.372141 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.372168 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.372181 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:35Z","lastTransitionTime":"2025-09-30T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.475507 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.475579 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.475602 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.475635 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.475658 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:35Z","lastTransitionTime":"2025-09-30T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.578863 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.578926 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.578945 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.578971 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.578991 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:35Z","lastTransitionTime":"2025-09-30T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.681813 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.681905 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.681923 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.681948 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.681966 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:35Z","lastTransitionTime":"2025-09-30T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.785644 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.785723 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.785742 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.785770 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.785790 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:35Z","lastTransitionTime":"2025-09-30T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.842907 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.842911 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.842970 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.843012 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:35 crc kubenswrapper[4783]: E0930 13:36:35.843123 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:35 crc kubenswrapper[4783]: E0930 13:36:35.843381 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:35 crc kubenswrapper[4783]: E0930 13:36:35.843570 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:35 crc kubenswrapper[4783]: E0930 13:36:35.843644 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.844361 4783 scope.go:117] "RemoveContainer" containerID="9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6" Sep 30 13:36:35 crc kubenswrapper[4783]: E0930 13:36:35.844521 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.888180 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.888321 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.888355 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.888383 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.888404 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:35Z","lastTransitionTime":"2025-09-30T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.991658 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.991719 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.991737 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.991761 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:35 crc kubenswrapper[4783]: I0930 13:36:35.991778 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:35Z","lastTransitionTime":"2025-09-30T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.094820 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.094929 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.094954 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.094983 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.095006 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:36Z","lastTransitionTime":"2025-09-30T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.197641 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.197712 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.197730 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.197755 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.197772 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:36Z","lastTransitionTime":"2025-09-30T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.301100 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.301157 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.301174 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.301194 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.301210 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:36Z","lastTransitionTime":"2025-09-30T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.403061 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.403117 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.403127 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.403142 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.403173 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:36Z","lastTransitionTime":"2025-09-30T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.506848 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.506887 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.506898 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.506912 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.506924 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:36Z","lastTransitionTime":"2025-09-30T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.610595 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.610704 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.610722 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.610745 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.610767 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:36Z","lastTransitionTime":"2025-09-30T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.713824 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.713891 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.713903 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.713921 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.713932 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:36Z","lastTransitionTime":"2025-09-30T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.817064 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.817132 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.817151 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.817177 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.817196 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:36Z","lastTransitionTime":"2025-09-30T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.920732 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.920796 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.920866 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.920890 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:36 crc kubenswrapper[4783]: I0930 13:36:36.920908 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:36Z","lastTransitionTime":"2025-09-30T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.024847 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.024913 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.024930 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.024954 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.024974 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:37Z","lastTransitionTime":"2025-09-30T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.128608 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.128676 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.128693 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.128719 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.128736 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:37Z","lastTransitionTime":"2025-09-30T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.232048 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.232106 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.232124 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.232146 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.232163 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:37Z","lastTransitionTime":"2025-09-30T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.336412 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.336500 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.336519 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.336545 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.336563 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:37Z","lastTransitionTime":"2025-09-30T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.439855 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.439916 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.439930 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.439952 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.439967 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:37Z","lastTransitionTime":"2025-09-30T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.542355 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.542429 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.542447 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.542471 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.542495 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:37Z","lastTransitionTime":"2025-09-30T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.645213 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.645292 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.645308 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.645327 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.645344 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:37Z","lastTransitionTime":"2025-09-30T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.749497 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.749584 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.749609 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.749641 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.749664 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:37Z","lastTransitionTime":"2025-09-30T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
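The setters.go:603 entries above embed the node's Ready condition as JSON; its shape matches the public Kubernetes NodeCondition type. As a minimal, dependency-free sketch (a local struct standing in for k8s.io/api/core/v1.NodeCondition, with the payload copied verbatim from the log), decoding one of these lines looks like:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // nodeCondition mirrors the JSON shape seen in the condition={...} field.
    type nodeCondition struct {
        Type               string `json:"type"`
        Status             string `json:"status"`
        LastHeartbeatTime  string `json:"lastHeartbeatTime"`
        LastTransitionTime string `json:"lastTransitionTime"`
        Reason             string `json:"reason"`
        Message            string `json:"message"`
    }

    func main() {
        raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:35Z","lastTransitionTime":"2025-09-30T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
        var c nodeCondition
        if err := json.Unmarshal([]byte(raw), &c); err != nil {
            panic(err)
        }
        fmt.Printf("%s=%s (%s)\n", c.Type, c.Status, c.Reason) // Ready=False (KubeletNotReady)
    }

The five-message group repeats roughly every 100 ms because the kubelet keeps re-recording node status while the Ready condition stays False.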
Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.842549 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.842573 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.842786 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.842810 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:36:37 crc kubenswrapper[4783]: E0930 13:36:37.842855 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:36:37 crc kubenswrapper[4783]: E0930 13:36:37.842971 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4"
Sep 30 13:36:37 crc kubenswrapper[4783]: E0930 13:36:37.843060 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 13:36:37 crc kubenswrapper[4783]: E0930 13:36:37.843156 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.853014 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.853064 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.853080 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.853103 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.853118 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:37Z","lastTransitionTime":"2025-09-30T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.956847 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.956934 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.956982 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.957007 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:37 crc kubenswrapper[4783]: I0930 13:36:37.957024 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:37Z","lastTransitionTime":"2025-09-30T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.062139 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.062297 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.062318 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.062374 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.062398 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:38Z","lastTransitionTime":"2025-09-30T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.165701 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.165761 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.165778 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.165804 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.165823 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:38Z","lastTransitionTime":"2025-09-30T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.269648 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.269715 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.269731 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.269753 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.269768 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:38Z","lastTransitionTime":"2025-09-30T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.373159 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.373203 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.373251 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.373274 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.373286 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:38Z","lastTransitionTime":"2025-09-30T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.485512 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.485590 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.485617 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.485648 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.485668 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:38Z","lastTransitionTime":"2025-09-30T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.589036 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.589110 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.589133 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.589164 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.589186 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:38Z","lastTransitionTime":"2025-09-30T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.692452 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.692559 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.692583 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.692609 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.692628 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:38Z","lastTransitionTime":"2025-09-30T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.795538 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.795572 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.795586 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.795606 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.795620 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:38Z","lastTransitionTime":"2025-09-30T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.898513 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.898553 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.898566 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.898584 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:38 crc kubenswrapper[4783]: I0930 13:36:38.898597 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:38Z","lastTransitionTime":"2025-09-30T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.001924 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.001981 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.002035 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.002074 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.002113 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:39Z","lastTransitionTime":"2025-09-30T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.106330 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.106379 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.106396 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.106419 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.106438 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:39Z","lastTransitionTime":"2025-09-30T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.208619 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.208687 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.208712 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.208741 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.208762 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:39Z","lastTransitionTime":"2025-09-30T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.311973 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.312050 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.312073 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.312102 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.312125 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:39Z","lastTransitionTime":"2025-09-30T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.414538 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.414568 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.414576 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.414589 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.414598 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:39Z","lastTransitionTime":"2025-09-30T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.518315 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.518392 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.518409 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.518433 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.518450 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:39Z","lastTransitionTime":"2025-09-30T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.619462 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.619500 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.619508 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.619521 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.619530 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:39Z","lastTransitionTime":"2025-09-30T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.648109 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.648290 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.648321 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.648395 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.648417 4783 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T13:36:39Z","lastTransitionTime":"2025-09-30T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.680295 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8"]
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.680938 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.683776 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.683911 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.685651 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.686029 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.702583 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=73.702562044 podStartE2EDuration="1m13.702562044s" podCreationTimestamp="2025-09-30 13:35:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:36:39.702508012 +0000 UTC m=+99.633974329" watchObservedRunningTime="2025-09-30 13:36:39.702562044 +0000 UTC m=+99.634028361"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.738842 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-mxltm" podStartSLOduration=74.738819698 podStartE2EDuration="1m14.738819698s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:36:39.738151057 +0000 UTC m=+99.669617404" watchObservedRunningTime="2025-09-30 13:36:39.738819698 +0000 UTC m=+99.670286045"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.764286 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podStartSLOduration=74.764258337 podStartE2EDuration="1m14.764258337s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:36:39.748797168 +0000 UTC m=+99.680263545" watchObservedRunningTime="2025-09-30 13:36:39.764258337 +0000 UTC m=+99.695724674"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.773075 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/55012cb4-d2e5-46b4-ad09-6284148c3089-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.773140 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/55012cb4-d2e5-46b4-ad09-6284148c3089-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.773248 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/55012cb4-d2e5-46b4-ad09-6284148c3089-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.773279 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55012cb4-d2e5-46b4-ad09-6284148c3089-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.773307 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/55012cb4-d2e5-46b4-ad09-6284148c3089-service-ca\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.790844 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=19.790823511 podStartE2EDuration="19.790823511s" podCreationTimestamp="2025-09-30 13:36:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:36:39.773861125 +0000 UTC m=+99.705327442" watchObservedRunningTime="2025-09-30 13:36:39.790823511 +0000 UTC m=+99.722289828"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.824462 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=44.824444324 podStartE2EDuration="44.824444324s" podCreationTimestamp="2025-09-30 13:35:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:36:39.824064302 +0000 UTC m=+99.755530609" watchObservedRunningTime="2025-09-30 13:36:39.824444324 +0000 UTC m=+99.755910631"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.838998 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=77.838968754 podStartE2EDuration="1m17.838968754s" podCreationTimestamp="2025-09-30 13:35:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:36:39.838931153 +0000 UTC m=+99.770397460" watchObservedRunningTime="2025-09-30 13:36:39.838968754 +0000 UTC m=+99.770435101"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.842070 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.842131 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
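The pod_startup_latency_tracker lines encode one measurement three ways: podStartSLOduration (seconds, float), podStartE2EDuration (Go duration string), and the timestamp pair; the SLO duration is watchObservedRunningTime minus podCreationTimestamp. A small sketch verifying that arithmetic for the kube-apiserver-crc entry above (the layout string matches Go's default time.Time formatting used in these fields):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
        // Values copied from the kube-apiserver-crc log entry.
        created, _ := time.Parse(layout, "2025-09-30 13:35:22 +0000 UTC")
        running, _ := time.Parse(layout, "2025-09-30 13:36:39.838968754 +0000 UTC")
        // Prints 77.838968754, matching the logged podStartSLOduration.
        fmt.Println(running.Sub(created).Seconds())
    }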
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.842079 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.842191 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:36:39 crc kubenswrapper[4783]: E0930 13:36:39.842197 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:36:39 crc kubenswrapper[4783]: E0930 13:36:39.842310 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 13:36:39 crc kubenswrapper[4783]: E0930 13:36:39.842379 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4"
Sep 30 13:36:39 crc kubenswrapper[4783]: E0930 13:36:39.842445 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.850139 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-gcx27" podStartSLOduration=74.850105629 podStartE2EDuration="1m14.850105629s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:36:39.84977328 +0000 UTC m=+99.781239607" watchObservedRunningTime="2025-09-30 13:36:39.850105629 +0000 UTC m=+99.781571936"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.873973 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/55012cb4-d2e5-46b4-ad09-6284148c3089-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.874019 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55012cb4-d2e5-46b4-ad09-6284148c3089-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.874047 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/55012cb4-d2e5-46b4-ad09-6284148c3089-service-ca\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.874073 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/55012cb4-d2e5-46b4-ad09-6284148c3089-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.874082 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/55012cb4-d2e5-46b4-ad09-6284148c3089-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.874096 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/55012cb4-d2e5-46b4-ad09-6284148c3089-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8"
Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.874375 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/55012cb4-d2e5-46b4-ad09-6284148c3089-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8"
\"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8" Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.875151 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/55012cb4-d2e5-46b4-ad09-6284148c3089-service-ca\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8" Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.879633 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/55012cb4-d2e5-46b4-ad09-6284148c3089-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8" Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.892903 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55012cb4-d2e5-46b4-ad09-6284148c3089-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-qdtg8\" (UID: \"55012cb4-d2e5-46b4-ad09-6284148c3089\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8" Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.894334 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-2pmr9" podStartSLOduration=74.894313191 podStartE2EDuration="1m14.894313191s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:36:39.893830236 +0000 UTC m=+99.825296543" watchObservedRunningTime="2025-09-30 13:36:39.894313191 +0000 UTC m=+99.825779508" Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.894645 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-kszvl" podStartSLOduration=74.894639371 podStartE2EDuration="1m14.894639371s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:36:39.879846212 +0000 UTC m=+99.811312519" watchObservedRunningTime="2025-09-30 13:36:39.894639371 +0000 UTC m=+99.826105688" Sep 30 13:36:39 crc kubenswrapper[4783]: I0930 13:36:39.960006 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dq9kl" podStartSLOduration=74.959990717 podStartE2EDuration="1m14.959990717s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:36:39.959409529 +0000 UTC m=+99.890875836" watchObservedRunningTime="2025-09-30 13:36:39.959990717 +0000 UTC m=+99.891457024" Sep 30 13:36:40 crc kubenswrapper[4783]: I0930 13:36:40.002461 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8" Sep 30 13:36:40 crc kubenswrapper[4783]: W0930 13:36:40.017119 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod55012cb4_d2e5_46b4_ad09_6284148c3089.slice/crio-5a6835e101794aa02893c589a70de1b4c81fe2905ce06c62d44506da7c99fd02 WatchSource:0}: Error finding container 5a6835e101794aa02893c589a70de1b4c81fe2905ce06c62d44506da7c99fd02: Status 404 returned error can't find the container with id 5a6835e101794aa02893c589a70de1b4c81fe2905ce06c62d44506da7c99fd02 Sep 30 13:36:40 crc kubenswrapper[4783]: I0930 13:36:40.399402 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8" event={"ID":"55012cb4-d2e5-46b4-ad09-6284148c3089","Type":"ContainerStarted","Data":"7aa0d63ef070640845f65c359c2af8073e501e8988f826e86c898b1f47d70746"} Sep 30 13:36:40 crc kubenswrapper[4783]: I0930 13:36:40.399471 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8" event={"ID":"55012cb4-d2e5-46b4-ad09-6284148c3089","Type":"ContainerStarted","Data":"5a6835e101794aa02893c589a70de1b4c81fe2905ce06c62d44506da7c99fd02"} Sep 30 13:36:40 crc kubenswrapper[4783]: I0930 13:36:40.415295 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qdtg8" podStartSLOduration=75.415266546 podStartE2EDuration="1m15.415266546s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:36:40.414658157 +0000 UTC m=+100.346124474" watchObservedRunningTime="2025-09-30 13:36:40.415266546 +0000 UTC m=+100.346732883" Sep 30 13:36:41 crc kubenswrapper[4783]: I0930 13:36:41.843008 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:41 crc kubenswrapper[4783]: I0930 13:36:41.843037 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:41 crc kubenswrapper[4783]: I0930 13:36:41.843041 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:41 crc kubenswrapper[4783]: I0930 13:36:41.843108 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:41 crc kubenswrapper[4783]: E0930 13:36:41.843336 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
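The "SyncLoop (PLEG): event for pod" lines above serialize a pod lifecycle event as {"ID": pod UID, "Type": event kind, "Data": container or sandbox ID}; the 404 warning just before them is the cadvisor watcher racing the freshly created CRI-O sandbox. A minimal sketch decoding that payload (a local struct mirroring the logged shape, not the kubelet's internal type):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // plegEvent mirrors the event={...} field in the SyncLoop (PLEG) lines.
    type plegEvent struct {
        ID   string `json:"ID"`   // pod UID
        Type string `json:"Type"` // e.g. ContainerStarted
        Data string `json:"Data"` // container or sandbox ID
    }

    func main() {
        raw := `{"ID":"55012cb4-d2e5-46b4-ad09-6284148c3089","Type":"ContainerStarted","Data":"5a6835e101794aa02893c589a70de1b4c81fe2905ce06c62d44506da7c99fd02"}`
        var e plegEvent
        if err := json.Unmarshal([]byte(raw), &e); err != nil {
            panic(err)
        }
        fmt.Println(e.Type, "for pod", e.ID) // ContainerStarted for pod 55012cb4-...
    }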
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:41 crc kubenswrapper[4783]: E0930 13:36:41.843702 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:41 crc kubenswrapper[4783]: E0930 13:36:41.843933 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:41 crc kubenswrapper[4783]: E0930 13:36:41.844014 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:43 crc kubenswrapper[4783]: I0930 13:36:43.842435 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:43 crc kubenswrapper[4783]: I0930 13:36:43.842467 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:43 crc kubenswrapper[4783]: E0930 13:36:43.843474 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:43 crc kubenswrapper[4783]: I0930 13:36:43.842568 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:43 crc kubenswrapper[4783]: I0930 13:36:43.842528 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:43 crc kubenswrapper[4783]: E0930 13:36:43.843661 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:43 crc kubenswrapper[4783]: E0930 13:36:43.844841 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:43 crc kubenswrapper[4783]: E0930 13:36:43.845706 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:44 crc kubenswrapper[4783]: I0930 13:36:44.019405 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs\") pod \"network-metrics-daemon-k69sq\" (UID: \"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\") " pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:44 crc kubenswrapper[4783]: E0930 13:36:44.019522 4783 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 13:36:44 crc kubenswrapper[4783]: E0930 13:36:44.019568 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs podName:700fd549-bf4a-4e30-9e2c-efdb039a7ac4 nodeName:}" failed. No retries permitted until 2025-09-30 13:37:48.019553197 +0000 UTC m=+167.951019504 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs") pod "network-metrics-daemon-k69sq" (UID: "700fd549-bf4a-4e30-9e2c-efdb039a7ac4") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 13:36:45 crc kubenswrapper[4783]: I0930 13:36:45.842807 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:45 crc kubenswrapper[4783]: I0930 13:36:45.842809 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:45 crc kubenswrapper[4783]: I0930 13:36:45.842894 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:45 crc kubenswrapper[4783]: E0930 13:36:45.843015 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:45 crc kubenswrapper[4783]: I0930 13:36:45.842894 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:45 crc kubenswrapper[4783]: E0930 13:36:45.843132 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:45 crc kubenswrapper[4783]: E0930 13:36:45.843162 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:45 crc kubenswrapper[4783]: E0930 13:36:45.843271 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:46 crc kubenswrapper[4783]: I0930 13:36:46.843861 4783 scope.go:117] "RemoveContainer" containerID="9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6" Sep 30 13:36:46 crc kubenswrapper[4783]: E0930 13:36:46.844124 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" Sep 30 13:36:47 crc kubenswrapper[4783]: I0930 13:36:47.842128 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:47 crc kubenswrapper[4783]: I0930 13:36:47.842260 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:47 crc kubenswrapper[4783]: I0930 13:36:47.842149 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:47 crc kubenswrapper[4783]: I0930 13:36:47.842157 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:47 crc kubenswrapper[4783]: E0930 13:36:47.842352 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:47 crc kubenswrapper[4783]: E0930 13:36:47.842426 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:47 crc kubenswrapper[4783]: E0930 13:36:47.842562 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:47 crc kubenswrapper[4783]: E0930 13:36:47.842714 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:49 crc kubenswrapper[4783]: I0930 13:36:49.842186 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:49 crc kubenswrapper[4783]: I0930 13:36:49.842204 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:49 crc kubenswrapper[4783]: I0930 13:36:49.842190 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:49 crc kubenswrapper[4783]: I0930 13:36:49.842270 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:49 crc kubenswrapper[4783]: E0930 13:36:49.843880 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:49 crc kubenswrapper[4783]: E0930 13:36:49.844043 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:49 crc kubenswrapper[4783]: E0930 13:36:49.844330 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:49 crc kubenswrapper[4783]: E0930 13:36:49.844469 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:50 crc kubenswrapper[4783]: I0930 13:36:50.864833 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Sep 30 13:36:51 crc kubenswrapper[4783]: I0930 13:36:51.842347 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:51 crc kubenswrapper[4783]: I0930 13:36:51.842377 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:51 crc kubenswrapper[4783]: I0930 13:36:51.842369 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:51 crc kubenswrapper[4783]: I0930 13:36:51.842354 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:51 crc kubenswrapper[4783]: E0930 13:36:51.842540 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:51 crc kubenswrapper[4783]: E0930 13:36:51.842666 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:51 crc kubenswrapper[4783]: E0930 13:36:51.842730 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:51 crc kubenswrapper[4783]: E0930 13:36:51.842933 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:53 crc kubenswrapper[4783]: I0930 13:36:53.842667 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:53 crc kubenswrapper[4783]: I0930 13:36:53.842723 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:53 crc kubenswrapper[4783]: I0930 13:36:53.842723 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:53 crc kubenswrapper[4783]: I0930 13:36:53.842962 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:53 crc kubenswrapper[4783]: E0930 13:36:53.843329 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:53 crc kubenswrapper[4783]: E0930 13:36:53.843395 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:53 crc kubenswrapper[4783]: E0930 13:36:53.843548 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:53 crc kubenswrapper[4783]: E0930 13:36:53.843679 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:55 crc kubenswrapper[4783]: I0930 13:36:55.842320 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:55 crc kubenswrapper[4783]: I0930 13:36:55.842424 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:55 crc kubenswrapper[4783]: I0930 13:36:55.842442 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:55 crc kubenswrapper[4783]: I0930 13:36:55.842346 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:55 crc kubenswrapper[4783]: E0930 13:36:55.842545 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:55 crc kubenswrapper[4783]: E0930 13:36:55.842727 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:55 crc kubenswrapper[4783]: E0930 13:36:55.842876 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:55 crc kubenswrapper[4783]: E0930 13:36:55.843067 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:57 crc kubenswrapper[4783]: I0930 13:36:57.843014 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:57 crc kubenswrapper[4783]: I0930 13:36:57.843045 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:57 crc kubenswrapper[4783]: I0930 13:36:57.843045 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:57 crc kubenswrapper[4783]: I0930 13:36:57.843204 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:57 crc kubenswrapper[4783]: E0930 13:36:57.843352 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:36:57 crc kubenswrapper[4783]: E0930 13:36:57.843440 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:57 crc kubenswrapper[4783]: E0930 13:36:57.843524 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:57 crc kubenswrapper[4783]: E0930 13:36:57.843637 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:57 crc kubenswrapper[4783]: I0930 13:36:57.844448 4783 scope.go:117] "RemoveContainer" containerID="9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6" Sep 30 13:36:57 crc kubenswrapper[4783]: E0930 13:36:57.844642 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-22xvs_openshift-ovn-kubernetes(7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" Sep 30 13:36:59 crc kubenswrapper[4783]: I0930 13:36:59.842425 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:36:59 crc kubenswrapper[4783]: I0930 13:36:59.842457 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:36:59 crc kubenswrapper[4783]: I0930 13:36:59.842542 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:36:59 crc kubenswrapper[4783]: I0930 13:36:59.842552 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:36:59 crc kubenswrapper[4783]: E0930 13:36:59.842789 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:36:59 crc kubenswrapper[4783]: E0930 13:36:59.842968 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:36:59 crc kubenswrapper[4783]: E0930 13:36:59.843084 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:36:59 crc kubenswrapper[4783]: E0930 13:36:59.843252 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:37:00 crc kubenswrapper[4783]: I0930 13:37:00.466616 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pmr9_e4186982-08f1-4809-be4f-25f86353ccf1/kube-multus/1.log" Sep 30 13:37:00 crc kubenswrapper[4783]: I0930 13:37:00.467761 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pmr9_e4186982-08f1-4809-be4f-25f86353ccf1/kube-multus/0.log" Sep 30 13:37:00 crc kubenswrapper[4783]: I0930 13:37:00.467845 4783 generic.go:334] "Generic (PLEG): container finished" podID="e4186982-08f1-4809-be4f-25f86353ccf1" containerID="70cf26cf8fb9a2eb04fa746718b72199e2dfe8b11a074f145579b0bb58652ef4" exitCode=1 Sep 30 13:37:00 crc kubenswrapper[4783]: I0930 13:37:00.467898 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pmr9" event={"ID":"e4186982-08f1-4809-be4f-25f86353ccf1","Type":"ContainerDied","Data":"70cf26cf8fb9a2eb04fa746718b72199e2dfe8b11a074f145579b0bb58652ef4"} Sep 30 13:37:00 crc kubenswrapper[4783]: I0930 13:37:00.467965 4783 scope.go:117] "RemoveContainer" containerID="db44e135b3ce2cfc3db1079f16f36b57dfbd577cc4b7e4491044241d0da49044" Sep 30 13:37:00 crc kubenswrapper[4783]: I0930 13:37:00.468493 4783 scope.go:117] "RemoveContainer" containerID="70cf26cf8fb9a2eb04fa746718b72199e2dfe8b11a074f145579b0bb58652ef4" Sep 30 13:37:00 crc kubenswrapper[4783]: E0930 13:37:00.468725 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-2pmr9_openshift-multus(e4186982-08f1-4809-be4f-25f86353ccf1)\"" pod="openshift-multus/multus-2pmr9" podUID="e4186982-08f1-4809-be4f-25f86353ccf1" Sep 30 13:37:00 crc kubenswrapper[4783]: I0930 13:37:00.509194 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=10.50917508 podStartE2EDuration="10.50917508s" podCreationTimestamp="2025-09-30 13:36:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:00.508914782 +0000 UTC m=+120.440381179" watchObservedRunningTime="2025-09-30 13:37:00.50917508 +0000 UTC m=+120.440641387" Sep 30 13:37:00 crc kubenswrapper[4783]: E0930 13:37:00.791541 4783 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Sep 30 13:37:00 crc kubenswrapper[4783]: E0930 13:37:00.943952 4783 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 13:37:01 crc kubenswrapper[4783]: I0930 13:37:01.473796 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pmr9_e4186982-08f1-4809-be4f-25f86353ccf1/kube-multus/1.log" Sep 30 13:37:01 crc kubenswrapper[4783]: I0930 13:37:01.843023 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:37:01 crc kubenswrapper[4783]: I0930 13:37:01.843087 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:37:01 crc kubenswrapper[4783]: I0930 13:37:01.843602 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:37:01 crc kubenswrapper[4783]: E0930 13:37:01.843786 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:37:01 crc kubenswrapper[4783]: I0930 13:37:01.843805 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:37:01 crc kubenswrapper[4783]: E0930 13:37:01.844125 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:37:01 crc kubenswrapper[4783]: E0930 13:37:01.844174 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:37:01 crc kubenswrapper[4783]: E0930 13:37:01.844331 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:37:03 crc kubenswrapper[4783]: I0930 13:37:03.842663 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:37:03 crc kubenswrapper[4783]: I0930 13:37:03.842698 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:37:03 crc kubenswrapper[4783]: I0930 13:37:03.842663 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:37:03 crc kubenswrapper[4783]: I0930 13:37:03.842885 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:37:03 crc kubenswrapper[4783]: E0930 13:37:03.842869 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:37:03 crc kubenswrapper[4783]: E0930 13:37:03.843003 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:37:03 crc kubenswrapper[4783]: E0930 13:37:03.843122 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:37:03 crc kubenswrapper[4783]: E0930 13:37:03.843315 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:37:05 crc kubenswrapper[4783]: I0930 13:37:05.842137 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:37:05 crc kubenswrapper[4783]: E0930 13:37:05.843180 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:37:05 crc kubenswrapper[4783]: I0930 13:37:05.842187 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:37:05 crc kubenswrapper[4783]: E0930 13:37:05.843339 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:37:05 crc kubenswrapper[4783]: I0930 13:37:05.842199 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:37:05 crc kubenswrapper[4783]: E0930 13:37:05.843423 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:37:05 crc kubenswrapper[4783]: I0930 13:37:05.842152 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:37:05 crc kubenswrapper[4783]: E0930 13:37:05.843518 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:37:05 crc kubenswrapper[4783]: E0930 13:37:05.945418 4783 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 13:37:07 crc kubenswrapper[4783]: I0930 13:37:07.842083 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:37:07 crc kubenswrapper[4783]: I0930 13:37:07.842197 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:37:07 crc kubenswrapper[4783]: I0930 13:37:07.842347 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:37:07 crc kubenswrapper[4783]: E0930 13:37:07.842327 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:37:07 crc kubenswrapper[4783]: I0930 13:37:07.842406 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:37:07 crc kubenswrapper[4783]: E0930 13:37:07.842758 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:37:07 crc kubenswrapper[4783]: E0930 13:37:07.842871 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:37:07 crc kubenswrapper[4783]: E0930 13:37:07.842985 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:37:09 crc kubenswrapper[4783]: I0930 13:37:09.842569 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:37:09 crc kubenswrapper[4783]: I0930 13:37:09.842644 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:37:09 crc kubenswrapper[4783]: I0930 13:37:09.842663 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:37:09 crc kubenswrapper[4783]: E0930 13:37:09.842750 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:37:09 crc kubenswrapper[4783]: I0930 13:37:09.842773 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:37:09 crc kubenswrapper[4783]: E0930 13:37:09.842913 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:37:09 crc kubenswrapper[4783]: E0930 13:37:09.842971 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:37:09 crc kubenswrapper[4783]: E0930 13:37:09.843090 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:37:10 crc kubenswrapper[4783]: I0930 13:37:10.846844 4783 scope.go:117] "RemoveContainer" containerID="9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6" Sep 30 13:37:10 crc kubenswrapper[4783]: E0930 13:37:10.946020 4783 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 13:37:11 crc kubenswrapper[4783]: I0930 13:37:11.517928 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/3.log" Sep 30 13:37:11 crc kubenswrapper[4783]: I0930 13:37:11.521545 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerStarted","Data":"01301faa9f4a3968edbb1103dd149e3178bf6e13dff13cf619d8aa8ae8305e56"} Sep 30 13:37:11 crc kubenswrapper[4783]: I0930 13:37:11.522502 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:37:11 crc kubenswrapper[4783]: I0930 13:37:11.571935 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podStartSLOduration=106.571917964 podStartE2EDuration="1m46.571917964s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:11.57142399 +0000 UTC m=+131.502890317" watchObservedRunningTime="2025-09-30 13:37:11.571917964 +0000 UTC m=+131.503384281" Sep 30 13:37:11 crc kubenswrapper[4783]: I0930 13:37:11.802632 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-k69sq"] Sep 30 13:37:11 crc kubenswrapper[4783]: I0930 13:37:11.802765 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:37:11 crc kubenswrapper[4783]: E0930 13:37:11.802865 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4" Sep 30 13:37:11 crc kubenswrapper[4783]: I0930 13:37:11.842046 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:37:11 crc kubenswrapper[4783]: I0930 13:37:11.842291 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:37:11 crc kubenswrapper[4783]: I0930 13:37:11.842396 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:37:11 crc kubenswrapper[4783]: I0930 13:37:11.842416 4783 scope.go:117] "RemoveContainer" containerID="70cf26cf8fb9a2eb04fa746718b72199e2dfe8b11a074f145579b0bb58652ef4" Sep 30 13:37:11 crc kubenswrapper[4783]: E0930 13:37:11.842705 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 13:37:11 crc kubenswrapper[4783]: E0930 13:37:11.842597 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:37:11 crc kubenswrapper[4783]: E0930 13:37:11.842510 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 13:37:12 crc kubenswrapper[4783]: I0930 13:37:12.528374 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pmr9_e4186982-08f1-4809-be4f-25f86353ccf1/kube-multus/1.log" Sep 30 13:37:12 crc kubenswrapper[4783]: I0930 13:37:12.528812 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pmr9" event={"ID":"e4186982-08f1-4809-be4f-25f86353ccf1","Type":"ContainerStarted","Data":"3c492124acd867cc378468935f2a044e00d8fbfc546541b973ef868724952798"} Sep 30 13:37:13 crc kubenswrapper[4783]: I0930 13:37:13.842620 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:37:13 crc kubenswrapper[4783]: I0930 13:37:13.842745 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq" Sep 30 13:37:13 crc kubenswrapper[4783]: E0930 13:37:13.842862 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 13:37:13 crc kubenswrapper[4783]: I0930 13:37:13.842776 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:37:13 crc kubenswrapper[4783]: E0930 13:37:13.843020 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4"
Sep 30 13:37:13 crc kubenswrapper[4783]: E0930 13:37:13.843173 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 13:37:13 crc kubenswrapper[4783]: I0930 13:37:13.842620 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:37:13 crc kubenswrapper[4783]: E0930 13:37:13.843652 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:37:15 crc kubenswrapper[4783]: I0930 13:37:15.842940 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:37:15 crc kubenswrapper[4783]: I0930 13:37:15.843017 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:37:15 crc kubenswrapper[4783]: E0930 13:37:15.843202 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 13:37:15 crc kubenswrapper[4783]: I0930 13:37:15.843541 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:37:15 crc kubenswrapper[4783]: E0930 13:37:15.843663 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 13:37:15 crc kubenswrapper[4783]: E0930 13:37:15.843788 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 13:37:15 crc kubenswrapper[4783]: I0930 13:37:15.842975 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:37:15 crc kubenswrapper[4783]: E0930 13:37:15.845052 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k69sq" podUID="700fd549-bf4a-4e30-9e2c-efdb039a7ac4"
Sep 30 13:37:17 crc kubenswrapper[4783]: I0930 13:37:17.842082 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:37:17 crc kubenswrapper[4783]: I0930 13:37:17.842150 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:37:17 crc kubenswrapper[4783]: I0930 13:37:17.842162 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:37:17 crc kubenswrapper[4783]: I0930 13:37:17.842188 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 13:37:17 crc kubenswrapper[4783]: I0930 13:37:17.846945 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Sep 30 13:37:17 crc kubenswrapper[4783]: I0930 13:37:17.847041 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Sep 30 13:37:17 crc kubenswrapper[4783]: I0930 13:37:17.847415 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Sep 30 13:37:17 crc kubenswrapper[4783]: I0930 13:37:17.847779 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Sep 30 13:37:17 crc kubenswrapper[4783]: I0930 13:37:17.848081 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Sep 30 13:37:17 crc kubenswrapper[4783]: I0930 13:37:17.849054 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.700455 4783 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.747883 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-4lcz8"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.748906 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.750003 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.758129 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.759004 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.759431 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.762241 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.762746 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.763105 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.763724 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.764264 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.768468 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.768576 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.768674 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.769160 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.782394 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-mt5f7"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.783779 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.783873 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-lbt9h"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.786013 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.786200 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.786648 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.787253 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.787362 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.787598 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.787908 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.787594 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.808759 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.809289 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.809569 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.809868 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.810108 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.810133 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.810257 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.810320 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.810520 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.810540 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.810666 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.810694 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.810927 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.810939 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.811503 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.811717 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.812505 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.813162 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.813818 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.814623 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.815002 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.815895 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.816904 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.817088 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.819102 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.821474 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.821955 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.823078 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.823414 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-x9v6j"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.823978 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-x9v6j"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.824319 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7z2r4"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.824850 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.831670 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-946mg"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.832133 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-946mg"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.839742 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.841439 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-z67sw"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.844084 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.847747 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-z67sw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848183 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.842974 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848371 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-audit\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848405 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6e505909-2b68-4acc-ad7d-06667458728e-etcd-client\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848428 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a2ac21c3-4001-4c91-851f-bcde41192c27-serving-cert\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.843096 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848452 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14dbef48-db9e-480f-bd4f-966368ed533b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mwplc\" (UID: \"14dbef48-db9e-480f-bd4f-966368ed533b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848490 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6e505909-2b68-4acc-ad7d-06667458728e-audit-dir\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848511 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/199c8590-e441-428e-99f7-baf1f24b3900-images\") pod \"machine-api-operator-5694c8668f-4lcz8\" (UID: \"199c8590-e441-428e-99f7-baf1f24b3900\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848530 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a2ac21c3-4001-4c91-851f-bcde41192c27-etcd-client\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.843766 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848551 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a2ac21c3-4001-4c91-851f-bcde41192c27-node-pullsecrets\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.843806 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848635 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-image-import-ca\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848658 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwmc9\" (UniqueName: \"kubernetes.io/projected/6e505909-2b68-4acc-ad7d-06667458728e-kube-api-access-dwmc9\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848677 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6e505909-2b68-4acc-ad7d-06667458728e-audit-policies\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.843869 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848698 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a2ac21c3-4001-4c91-851f-bcde41192c27-encryption-config\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848742 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c9d62d09-2b01-4420-9dba-700e5b1d63b1-service-ca-bundle\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: \"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848764 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c9d62d09-2b01-4420-9dba-700e5b1d63b1-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: \"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.843899 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848786 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e505909-2b68-4acc-ad7d-06667458728e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848809 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6e505909-2b68-4acc-ad7d-06667458728e-encryption-config\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848831 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csgn9\" (UniqueName: \"kubernetes.io/projected/a2ac21c3-4001-4c91-851f-bcde41192c27-kube-api-access-csgn9\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.843921 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848869 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/199c8590-e441-428e-99f7-baf1f24b3900-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-4lcz8\" (UID: \"199c8590-e441-428e-99f7-baf1f24b3900\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848891 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/199c8590-e441-428e-99f7-baf1f24b3900-config\") pod \"machine-api-operator-5694c8668f-4lcz8\" (UID: \"199c8590-e441-428e-99f7-baf1f24b3900\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.843962 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848941 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-etcd-serving-ca\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848962 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a2ac21c3-4001-4c91-851f-bcde41192c27-audit-dir\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.848985 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6e505909-2b68-4acc-ad7d-06667458728e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.849012 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-config\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.849037 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w9nr\" (UniqueName: \"kubernetes.io/projected/199c8590-e441-428e-99f7-baf1f24b3900-kube-api-access-5w9nr\") pod \"machine-api-operator-5694c8668f-4lcz8\" (UID: \"199c8590-e441-428e-99f7-baf1f24b3900\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.849070 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e505909-2b68-4acc-ad7d-06667458728e-serving-cert\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.844039 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.849129 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9d62d09-2b01-4420-9dba-700e5b1d63b1-config\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: \"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.844076 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.849161 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hv7zv\" (UniqueName: \"kubernetes.io/projected/c9d62d09-2b01-4420-9dba-700e5b1d63b1-kube-api-access-hv7zv\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: \"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.849215 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14dbef48-db9e-480f-bd4f-966368ed533b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mwplc\" (UID: \"14dbef48-db9e-480f-bd4f-966368ed533b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.844098 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.849263 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzgjk\" (UniqueName: \"kubernetes.io/projected/14dbef48-db9e-480f-bd4f-966368ed533b-kube-api-access-pzgjk\") pod \"openshift-apiserver-operator-796bbdcf4f-mwplc\" (UID: \"14dbef48-db9e-480f-bd4f-966368ed533b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.844193 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.849287 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-trusted-ca-bundle\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.849312 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9d62d09-2b01-4420-9dba-700e5b1d63b1-serving-cert\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: \"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.844257 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.844296 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.852062 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.853727 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.855806 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.856489 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.861126 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.861206 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.861326 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.861389 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.861438 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.861609 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.861680 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.861727 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.866457 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.867258 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.868295 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.868906 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.870523 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.870759 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.871232 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.871647 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.871740 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.861615 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.882705 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.885904 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.886597 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.886694 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.887483 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.887974 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vz2w9"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.888265 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-4lcz8"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.888281 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.888291 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.887975 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.888379 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.888515 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.888577 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.888147 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.888743 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.888255 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.889015 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.889098 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.894248 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.894354 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.894541 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.895168 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.895453 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.896320 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-x9v6j"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.897334 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.897717 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.897752 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.897877 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7z2r4"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.898752 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.899744 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.901005 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.903795 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.904112 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.904183 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.904925 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.904926 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.905498 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.906376 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.906834 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vz2w9"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.909383 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.912355 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-946mg"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.913700 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.913961 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.914181 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.915059 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qmsj2"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.915539 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qmsj2"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.916185 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.916615 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.917335 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-k8cdm"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.917747 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-k8cdm"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.923850 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwvfx"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.924392 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.925064 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.927851 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.929028 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.929273 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.930005 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.930243 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-6shjd"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.930935 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.930993 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-6shjd"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.931174 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.931630 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jq62m"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.932963 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.933606 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-mt5f7"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.947659 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.947746 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-lbt9h"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950034 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6e505909-2b68-4acc-ad7d-06667458728e-etcd-client\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950088 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a2ac21c3-4001-4c91-851f-bcde41192c27-serving-cert\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950123 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-trusted-ca-bundle\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950150 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7-machine-approver-tls\") pod \"machine-approver-56656f9798-b6c2n\" (UID: \"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950182 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14dbef48-db9e-480f-bd4f-966368ed533b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mwplc\" (UID: \"14dbef48-db9e-480f-bd4f-966368ed533b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950209 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2th85\" (UniqueName: \"kubernetes.io/projected/9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7-kube-api-access-2th85\") pod \"machine-approver-56656f9798-b6c2n\" (UID: \"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950280 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950312 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62rv2\" (UniqueName: \"kubernetes.io/projected/718955ee-bebc-4bab-8658-3e9d8a782b5c-kube-api-access-62rv2\") pod \"cluster-image-registry-operator-dc59b4c8b-sllkz\" (UID: \"718955ee-bebc-4bab-8658-3e9d8a782b5c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950339 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvgg2\" (UniqueName: \"kubernetes.io/projected/5e34af40-7563-4772-bd48-cc31a0354c25-kube-api-access-gvgg2\") pod \"downloads-7954f5f757-z67sw\" (UID: \"5e34af40-7563-4772-bd48-cc31a0354c25\") " pod="openshift-console/downloads-7954f5f757-z67sw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950365 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6e505909-2b68-4acc-ad7d-06667458728e-audit-dir\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950389 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/dbd38476-9515-4ef6-b260-de6a854da0f4-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dnx6l\" (UID: \"dbd38476-9515-4ef6-b260-de6a854da0f4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950418 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/199c8590-e441-428e-99f7-baf1f24b3900-images\") pod \"machine-api-operator-5694c8668f-4lcz8\" (UID: \"199c8590-e441-428e-99f7-baf1f24b3900\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950446 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a2ac21c3-4001-4c91-851f-bcde41192c27-etcd-client\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950471 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a2ac21c3-4001-4c91-851f-bcde41192c27-node-pullsecrets\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950493 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-image-import-ca\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950530 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctgrj\" (UniqueName: \"kubernetes.io/projected/ca271c35-f2e4-4c56-a82b-4f47591904f1-kube-api-access-ctgrj\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950556 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba290d22-ab6f-413c-9dfc-3285b83488ed-config\") pod \"route-controller-manager-6576b87f9c-fvldh\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950585 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af6b0081-12d7-4b05-b5ff-f2b9d20efa95-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj65k\" (UID: \"af6b0081-12d7-4b05-b5ff-f2b9d20efa95\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950610 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af6b0081-12d7-4b05-b5ff-f2b9d20efa95-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj65k\" (UID: \"af6b0081-12d7-4b05-b5ff-f2b9d20efa95\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950636 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwmc9\" (UniqueName: \"kubernetes.io/projected/6e505909-2b68-4acc-ad7d-06667458728e-kube-api-access-dwmc9\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950666 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6e505909-2b68-4acc-ad7d-06667458728e-audit-policies\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950693 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2dd54ba0-375c-4230-9d39-3ef77c055e7f-serving-cert\") pod \"console-operator-58897d9998-946mg\" (UID: \"2dd54ba0-375c-4230-9d39-3ef77c055e7f\") " pod="openshift-console-operator/console-operator-58897d9998-946mg"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950719 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950743 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950772 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950800 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c9d62d09-2b01-4420-9dba-700e5b1d63b1-service-ca-bundle\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: \"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950825 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/718955ee-bebc-4bab-8658-3e9d8a782b5c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-sllkz\" (UID: \"718955ee-bebc-4bab-8658-3e9d8a782b5c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950851 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/718955ee-bebc-4bab-8658-3e9d8a782b5c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-sllkz\" (UID: \"718955ee-bebc-4bab-8658-3e9d8a782b5c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950877 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgzq9\" (UniqueName: \"kubernetes.io/projected/af6b0081-12d7-4b05-b5ff-f2b9d20efa95-kube-api-access-bgzq9\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj65k\" (UID: \"af6b0081-12d7-4b05-b5ff-f2b9d20efa95\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950905 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a2ac21c3-4001-4c91-851f-bcde41192c27-encryption-config\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950933 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c9d62d09-2b01-4420-9dba-700e5b1d63b1-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: \"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950961 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csgn9\" (UniqueName: \"kubernetes.io/projected/a2ac21c3-4001-4c91-851f-bcde41192c27-kube-api-access-csgn9\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.950988 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-oauth-config\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.952289 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.952359 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6e505909-2b68-4acc-ad7d-06667458728e-audit-dir\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.953036 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6e505909-2b68-4acc-ad7d-06667458728e-audit-policies\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.953495 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l"]
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.953795 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-image-import-ca\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.953916 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zclz\" (UniqueName: \"kubernetes.io/projected/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-kube-api-access-4zclz\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.955057 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/199c8590-e441-428e-99f7-baf1f24b3900-images\") pod \"machine-api-operator-5694c8668f-4lcz8\" (UID: \"199c8590-e441-428e-99f7-baf1f24b3900\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.955509 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a2ac21c3-4001-4c91-851f-bcde41192c27-node-pullsecrets\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.958762 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a2ac21c3-4001-4c91-851f-bcde41192c27-etcd-client\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.959756 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14dbef48-db9e-480f-bd4f-966368ed533b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mwplc\" (UID: \"14dbef48-db9e-480f-bd4f-966368ed533b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.960753 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c9d62d09-2b01-4420-9dba-700e5b1d63b1-service-ca-bundle\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: \"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.969384 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a2ac21c3-4001-4c91-851f-bcde41192c27-encryption-config\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.969673 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a2ac21c3-4001-4c91-851f-bcde41192c27-serving-cert\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.970648 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.970701 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e505909-2b68-4acc-ad7d-06667458728e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.970723 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6e505909-2b68-4acc-ad7d-06667458728e-encryption-config\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"
Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.970752 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume
\"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3f9c1072-438c-42a1-b380-8e1aefb0116c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2t6xf\" (UID: \"3f9c1072-438c-42a1-b380-8e1aefb0116c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.970780 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/199c8590-e441-428e-99f7-baf1f24b3900-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-4lcz8\" (UID: \"199c8590-e441-428e-99f7-baf1f24b3900\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.970802 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bftkt\" (UniqueName: \"kubernetes.io/projected/2dd54ba0-375c-4230-9d39-3ef77c055e7f-kube-api-access-bftkt\") pod \"console-operator-58897d9998-946mg\" (UID: \"2dd54ba0-375c-4230-9d39-3ef77c055e7f\") " pod="openshift-console-operator/console-operator-58897d9998-946mg" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.970824 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw52j\" (UniqueName: \"kubernetes.io/projected/dbd38476-9515-4ef6-b260-de6a854da0f4-kube-api-access-cw52j\") pod \"openshift-config-operator-7777fb866f-dnx6l\" (UID: \"dbd38476-9515-4ef6-b260-de6a854da0f4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.970861 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/199c8590-e441-428e-99f7-baf1f24b3900-config\") pod \"machine-api-operator-5694c8668f-4lcz8\" (UID: \"199c8590-e441-428e-99f7-baf1f24b3900\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.970883 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.970913 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7-config\") pod \"machine-approver-56656f9798-b6c2n\" (UID: \"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.970928 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ba290d22-ab6f-413c-9dfc-3285b83488ed-client-ca\") pod \"route-controller-manager-6576b87f9c-fvldh\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.970943 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.970958 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.971757 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-config\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.971780 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96xx4\" (UniqueName: \"kubernetes.io/projected/d452858f-ce60-4cf1-83ec-ac72613ca649-kube-api-access-96xx4\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.971325 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e505909-2b68-4acc-ad7d-06667458728e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.971734 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c9d62d09-2b01-4420-9dba-700e5b1d63b1-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: \"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972163 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qmsj2"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972364 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgxm4\" (UniqueName: \"kubernetes.io/projected/3f9c1072-438c-42a1-b380-8e1aefb0116c-kube-api-access-sgxm4\") pod \"cluster-samples-operator-665b6dd947-2t6xf\" (UID: \"3f9c1072-438c-42a1-b380-8e1aefb0116c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972433 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-service-ca\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:20 crc 
kubenswrapper[4783]: I0930 13:37:20.972471 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a2ac21c3-4001-4c91-851f-bcde41192c27-audit-dir\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972525 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-etcd-serving-ca\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972560 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6e505909-2b68-4acc-ad7d-06667458728e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972591 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972629 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-serving-cert\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972683 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2dd54ba0-375c-4230-9d39-3ef77c055e7f-trusted-ca\") pod \"console-operator-58897d9998-946mg\" (UID: \"2dd54ba0-375c-4230-9d39-3ef77c055e7f\") " pod="openshift-console-operator/console-operator-58897d9998-946mg" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972706 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972729 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7-auth-proxy-config\") pod \"machine-approver-56656f9798-b6c2n\" (UID: \"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972755 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-config\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972779 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5w9nr\" (UniqueName: \"kubernetes.io/projected/199c8590-e441-428e-99f7-baf1f24b3900-kube-api-access-5w9nr\") pod \"machine-api-operator-5694c8668f-4lcz8\" (UID: \"199c8590-e441-428e-99f7-baf1f24b3900\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972802 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-serving-cert\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972824 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e505909-2b68-4acc-ad7d-06667458728e-serving-cert\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972845 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d452858f-ce60-4cf1-83ec-ac72613ca649-audit-dir\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972868 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbd38476-9515-4ef6-b260-de6a854da0f4-serving-cert\") pod \"openshift-config-operator-7777fb866f-dnx6l\" (UID: \"dbd38476-9515-4ef6-b260-de6a854da0f4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972891 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972912 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/199c8590-e441-428e-99f7-baf1f24b3900-config\") pod \"machine-api-operator-5694c8668f-4lcz8\" (UID: \"199c8590-e441-428e-99f7-baf1f24b3900\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972919 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9d62d09-2b01-4420-9dba-700e5b1d63b1-config\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: \"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973002 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hv7zv\" (UniqueName: \"kubernetes.io/projected/c9d62d09-2b01-4420-9dba-700e5b1d63b1-kube-api-access-hv7zv\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: \"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973053 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba290d22-ab6f-413c-9dfc-3285b83488ed-serving-cert\") pod \"route-controller-manager-6576b87f9c-fvldh\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973073 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-client-ca\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973110 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9jwp\" (UniqueName: \"kubernetes.io/projected/ba290d22-ab6f-413c-9dfc-3285b83488ed-kube-api-access-k9jwp\") pod \"route-controller-manager-6576b87f9c-fvldh\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973131 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-trusted-ca-bundle\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973175 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-etcd-serving-ca\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.972527 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a2ac21c3-4001-4c91-851f-bcde41192c27-audit-dir\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973529 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973601 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9d62d09-2b01-4420-9dba-700e5b1d63b1-config\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: 
\"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973613 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6e505909-2b68-4acc-ad7d-06667458728e-etcd-client\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973669 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973720 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6e505909-2b68-4acc-ad7d-06667458728e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973797 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9d62d09-2b01-4420-9dba-700e5b1d63b1-serving-cert\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: \"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973867 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14dbef48-db9e-480f-bd4f-966368ed533b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mwplc\" (UID: \"14dbef48-db9e-480f-bd4f-966368ed533b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973894 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzgjk\" (UniqueName: \"kubernetes.io/projected/14dbef48-db9e-480f-bd4f-966368ed533b-kube-api-access-pzgjk\") pod \"openshift-apiserver-operator-796bbdcf4f-mwplc\" (UID: \"14dbef48-db9e-480f-bd4f-966368ed533b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973915 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-config\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973979 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.973999 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-audit-policies\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.974048 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-audit\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.974278 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-oauth-serving-cert\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.974301 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dd54ba0-375c-4230-9d39-3ef77c055e7f-config\") pod \"console-operator-58897d9998-946mg\" (UID: \"2dd54ba0-375c-4230-9d39-3ef77c055e7f\") " pod="openshift-console-operator/console-operator-58897d9998-946mg" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.974321 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/718955ee-bebc-4bab-8658-3e9d8a782b5c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-sllkz\" (UID: \"718955ee-bebc-4bab-8658-3e9d8a782b5c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.974629 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.974798 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-config\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.974959 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-trusted-ca-bundle\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.976479 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.977936 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.979004 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-njt7f"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.979667 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-njt7f" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.979814 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.980090 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a2ac21c3-4001-4c91-851f-bcde41192c27-audit\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.980322 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14dbef48-db9e-480f-bd4f-966368ed533b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mwplc\" (UID: \"14dbef48-db9e-480f-bd4f-966368ed533b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.980397 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9d62d09-2b01-4420-9dba-700e5b1d63b1-serving-cert\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: \"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.980409 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.981940 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e505909-2b68-4acc-ad7d-06667458728e-serving-cert\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.983646 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.984460 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nlgqp"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.985082 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.985299 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4hvcd"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.985476 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.985803 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.988245 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6e505909-2b68-4acc-ad7d-06667458728e-encryption-config\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.988589 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/199c8590-e441-428e-99f7-baf1f24b3900-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-4lcz8\" (UID: \"199c8590-e441-428e-99f7-baf1f24b3900\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.989337 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.990123 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.991082 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.991086 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.991664 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.992530 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.993159 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.994376 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.994942 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-458dz"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.995180 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.995470 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.995872 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5fpgz"] Sep 30 13:37:20 crc kubenswrapper[4783]: I0930 13:37:20.996343 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5fpgz" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.006044 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.006678 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.008468 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-csqxv"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.009361 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-csqxv" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.010705 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.010867 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.014292 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwvfx"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.016292 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-z67sw"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.017743 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-6j7xf"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.018500 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-6j7xf" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.019107 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.020269 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.021757 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-k8cdm"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.023151 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.025864 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jq62m"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.027646 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nlgqp"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.028279 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-njt7f"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.030721 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-csqxv"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.031658 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.032141 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4hvcd"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.033437 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.034855 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-458dz"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.037614 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-6j7xf"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.039652 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.051856 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.054103 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.057624 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.059508 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5fpgz"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.060887 
4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.062449 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-4kmnp"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.063901 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.067237 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-4kmnp"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.068959 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-6l7fj"] Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.069624 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6l7fj" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.071551 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075369 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zclz\" (UniqueName: \"kubernetes.io/projected/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-kube-api-access-4zclz\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075404 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075435 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-oauth-config\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075460 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/06c6f17e-509e-47c9-a7fa-26cc13ed6012-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-njt7f\" (UID: \"06c6f17e-509e-47c9-a7fa-26cc13ed6012\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-njt7f" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075485 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3f9c1072-438c-42a1-b380-8e1aefb0116c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2t6xf\" (UID: \"3f9c1072-438c-42a1-b380-8e1aefb0116c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075510 4783 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-bftkt\" (UniqueName: \"kubernetes.io/projected/2dd54ba0-375c-4230-9d39-3ef77c055e7f-kube-api-access-bftkt\") pod \"console-operator-58897d9998-946mg\" (UID: \"2dd54ba0-375c-4230-9d39-3ef77c055e7f\") " pod="openshift-console-operator/console-operator-58897d9998-946mg" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075532 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw52j\" (UniqueName: \"kubernetes.io/projected/dbd38476-9515-4ef6-b260-de6a854da0f4-kube-api-access-cw52j\") pod \"openshift-config-operator-7777fb866f-dnx6l\" (UID: \"dbd38476-9515-4ef6-b260-de6a854da0f4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075566 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075600 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075621 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075645 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-config\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075667 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7-config\") pod \"machine-approver-56656f9798-b6c2n\" (UID: \"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075687 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ba290d22-ab6f-413c-9dfc-3285b83488ed-client-ca\") pod \"route-controller-manager-6576b87f9c-fvldh\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075707 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96xx4\" (UniqueName: 
\"kubernetes.io/projected/d452858f-ce60-4cf1-83ec-ac72613ca649-kube-api-access-96xx4\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075758 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4957a6e1-74ad-4d72-99c0-a11c24629f13-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-npcpf\" (UID: \"4957a6e1-74ad-4d72-99c0-a11c24629f13\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.075797 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6db110f9-7517-4393-ad1a-b621ed8b64f6-signing-cabundle\") pod \"service-ca-9c57cc56f-nlgqp\" (UID: \"6db110f9-7517-4393-ad1a-b621ed8b64f6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076007 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgxm4\" (UniqueName: \"kubernetes.io/projected/3f9c1072-438c-42a1-b380-8e1aefb0116c-kube-api-access-sgxm4\") pod \"cluster-samples-operator-665b6dd947-2t6xf\" (UID: \"3f9c1072-438c-42a1-b380-8e1aefb0116c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076043 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-service-ca\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076218 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-metrics-certs\") pod \"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076274 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/91c12832-2428-4e1c-b9de-18936239646c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4hvcd\" (UID: \"91c12832-2428-4e1c-b9de-18936239646c\") " pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076300 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/38a97782-7b47-42b2-aea4-6e310de9d476-proxy-tls\") pod \"machine-config-controller-84d6567774-458dz\" (UID: \"38a97782-7b47-42b2-aea4-6e310de9d476\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076322 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-stats-auth\") pod 
\"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076348 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076370 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txgjc\" (UniqueName: \"kubernetes.io/projected/af39d542-9d45-4afb-8b3c-2d50e9fdfb90-kube-api-access-txgjc\") pod \"dns-operator-744455d44c-k8cdm\" (UID: \"af39d542-9d45-4afb-8b3c-2d50e9fdfb90\") " pod="openshift-dns-operator/dns-operator-744455d44c-k8cdm" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076394 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/977de9a0-2247-494e-b9b2-3f7296950f1f-etcd-service-ca\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076417 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cslhd\" (UniqueName: \"kubernetes.io/projected/91c12832-2428-4e1c-b9de-18936239646c-kube-api-access-cslhd\") pod \"marketplace-operator-79b997595-4hvcd\" (UID: \"91c12832-2428-4e1c-b9de-18936239646c\") " pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076444 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-serving-cert\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076495 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/977de9a0-2247-494e-b9b2-3f7296950f1f-etcd-ca\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076520 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2dd54ba0-375c-4230-9d39-3ef77c055e7f-trusted-ca\") pod \"console-operator-58897d9998-946mg\" (UID: \"2dd54ba0-375c-4230-9d39-3ef77c055e7f\") " pod="openshift-console-operator/console-operator-58897d9998-946mg" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076542 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076567 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/38a97782-7b47-42b2-aea4-6e310de9d476-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-458dz\" (UID: \"38a97782-7b47-42b2-aea4-6e310de9d476\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076634 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-serving-cert\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076665 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7-auth-proxy-config\") pod \"machine-approver-56656f9798-b6c2n\" (UID: \"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076679 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7-config\") pod \"machine-approver-56656f9798-b6c2n\" (UID: \"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076694 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6af2276a-3ae6-4c19-b75c-935d765d3890-secret-volume\") pod \"collect-profiles-29320650-9kh42\" (UID: \"6af2276a-3ae6-4c19-b75c-935d765d3890\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076841 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ba290d22-ab6f-413c-9dfc-3285b83488ed-client-ca\") pod \"route-controller-manager-6576b87f9c-fvldh\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076920 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-config\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.076942 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-service-ca\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077010 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/d452858f-ce60-4cf1-83ec-ac72613ca649-audit-dir\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077035 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-service-ca-bundle\") pod \"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077069 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prvcg\" (UniqueName: \"kubernetes.io/projected/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-kube-api-access-prvcg\") pod \"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077088 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4957a6e1-74ad-4d72-99c0-a11c24629f13-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-npcpf\" (UID: \"4957a6e1-74ad-4d72-99c0-a11c24629f13\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077114 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d452858f-ce60-4cf1-83ec-ac72613ca649-audit-dir\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077150 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbd38476-9515-4ef6-b260-de6a854da0f4-serving-cert\") pod \"openshift-config-operator-7777fb866f-dnx6l\" (UID: \"dbd38476-9515-4ef6-b260-de6a854da0f4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077170 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077493 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077504 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7-auth-proxy-config\") pod 
\"machine-approver-56656f9798-b6c2n\" (UID: \"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077589 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba290d22-ab6f-413c-9dfc-3285b83488ed-serving-cert\") pod \"route-controller-manager-6576b87f9c-fvldh\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077630 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8s59\" (UniqueName: \"kubernetes.io/projected/6db110f9-7517-4393-ad1a-b621ed8b64f6-kube-api-access-c8s59\") pod \"service-ca-9c57cc56f-nlgqp\" (UID: \"6db110f9-7517-4393-ad1a-b621ed8b64f6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077649 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4957a6e1-74ad-4d72-99c0-a11c24629f13-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-npcpf\" (UID: \"4957a6e1-74ad-4d72-99c0-a11c24629f13\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077695 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-client-ca\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077716 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9jwp\" (UniqueName: \"kubernetes.io/projected/ba290d22-ab6f-413c-9dfc-3285b83488ed-kube-api-access-k9jwp\") pod \"route-controller-manager-6576b87f9c-fvldh\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077754 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/af39d542-9d45-4afb-8b3c-2d50e9fdfb90-metrics-tls\") pod \"dns-operator-744455d44c-k8cdm\" (UID: \"af39d542-9d45-4afb-8b3c-2d50e9fdfb90\") " pod="openshift-dns-operator/dns-operator-744455d44c-k8cdm" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077771 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-default-certificate\") pod \"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.077788 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/91c12832-2428-4e1c-b9de-18936239646c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4hvcd\" (UID: 
\"91c12832-2428-4e1c-b9de-18936239646c\") " pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.078383 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2dd54ba0-375c-4230-9d39-3ef77c055e7f-trusted-ca\") pod \"console-operator-58897d9998-946mg\" (UID: \"2dd54ba0-375c-4230-9d39-3ef77c055e7f\") " pod="openshift-console-operator/console-operator-58897d9998-946mg" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.078385 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-config\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.078447 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.078922 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-audit-policies\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.078977 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dd54ba0-375c-4230-9d39-3ef77c055e7f-config\") pod \"console-operator-58897d9998-946mg\" (UID: \"2dd54ba0-375c-4230-9d39-3ef77c055e7f\") " pod="openshift-console-operator/console-operator-58897d9998-946mg" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.079014 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/718955ee-bebc-4bab-8658-3e9d8a782b5c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-sllkz\" (UID: \"718955ee-bebc-4bab-8658-3e9d8a782b5c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.079070 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-client-ca\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.079348 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.079566 4783 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-oauth-serving-cert\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.079623 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/977de9a0-2247-494e-b9b2-3f7296950f1f-config\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.079971 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-audit-policies\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080033 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-trusted-ca-bundle\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080035 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-config\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080068 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7-machine-approver-tls\") pod \"machine-approver-56656f9798-b6c2n\" (UID: \"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080400 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dd54ba0-375c-4230-9d39-3ef77c055e7f-config\") pod \"console-operator-58897d9998-946mg\" (UID: \"2dd54ba0-375c-4230-9d39-3ef77c055e7f\") " pod="openshift-console-operator/console-operator-58897d9998-946mg" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080608 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/718955ee-bebc-4bab-8658-3e9d8a782b5c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-sllkz\" (UID: \"718955ee-bebc-4bab-8658-3e9d8a782b5c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080641 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/977de9a0-2247-494e-b9b2-3f7296950f1f-serving-cert\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080706 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/977de9a0-2247-494e-b9b2-3f7296950f1f-etcd-client\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080757 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clgf2\" (UniqueName: \"kubernetes.io/projected/06c6f17e-509e-47c9-a7fa-26cc13ed6012-kube-api-access-clgf2\") pod \"multus-admission-controller-857f4d67dd-njt7f\" (UID: \"06c6f17e-509e-47c9-a7fa-26cc13ed6012\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-njt7f" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080804 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6db110f9-7517-4393-ad1a-b621ed8b64f6-signing-key\") pod \"service-ca-9c57cc56f-nlgqp\" (UID: \"6db110f9-7517-4393-ad1a-b621ed8b64f6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080860 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2th85\" (UniqueName: \"kubernetes.io/projected/9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7-kube-api-access-2th85\") pod \"machine-approver-56656f9798-b6c2n\" (UID: \"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080871 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080933 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080962 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62rv2\" (UniqueName: \"kubernetes.io/projected/718955ee-bebc-4bab-8658-3e9d8a782b5c-kube-api-access-62rv2\") pod \"cluster-image-registry-operator-dc59b4c8b-sllkz\" (UID: \"718955ee-bebc-4bab-8658-3e9d8a782b5c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080989 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/dbd38476-9515-4ef6-b260-de6a854da0f4-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dnx6l\" (UID: \"dbd38476-9515-4ef6-b260-de6a854da0f4\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.080986 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-oauth-serving-cert\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081015 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvgg2\" (UniqueName: \"kubernetes.io/projected/5e34af40-7563-4772-bd48-cc31a0354c25-kube-api-access-gvgg2\") pod \"downloads-7954f5f757-z67sw\" (UID: \"5e34af40-7563-4772-bd48-cc31a0354c25\") " pod="openshift-console/downloads-7954f5f757-z67sw" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081041 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vdql\" (UniqueName: \"kubernetes.io/projected/977de9a0-2247-494e-b9b2-3f7296950f1f-kube-api-access-2vdql\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081072 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctgrj\" (UniqueName: \"kubernetes.io/projected/ca271c35-f2e4-4c56-a82b-4f47591904f1-kube-api-access-ctgrj\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081098 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba290d22-ab6f-413c-9dfc-3285b83488ed-config\") pod \"route-controller-manager-6576b87f9c-fvldh\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081128 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af6b0081-12d7-4b05-b5ff-f2b9d20efa95-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj65k\" (UID: \"af6b0081-12d7-4b05-b5ff-f2b9d20efa95\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081166 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af6b0081-12d7-4b05-b5ff-f2b9d20efa95-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj65k\" (UID: \"af6b0081-12d7-4b05-b5ff-f2b9d20efa95\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081029 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081216 4783 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6af2276a-3ae6-4c19-b75c-935d765d3890-config-volume\") pod \"collect-profiles-29320650-9kh42\" (UID: \"6af2276a-3ae6-4c19-b75c-935d765d3890\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081264 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2dd54ba0-375c-4230-9d39-3ef77c055e7f-serving-cert\") pod \"console-operator-58897d9998-946mg\" (UID: \"2dd54ba0-375c-4230-9d39-3ef77c055e7f\") " pod="openshift-console-operator/console-operator-58897d9998-946mg" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081292 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081317 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081338 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081364 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rsmd\" (UniqueName: \"kubernetes.io/projected/6af2276a-3ae6-4c19-b75c-935d765d3890-kube-api-access-9rsmd\") pod \"collect-profiles-29320650-9kh42\" (UID: \"6af2276a-3ae6-4c19-b75c-935d765d3890\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081393 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/718955ee-bebc-4bab-8658-3e9d8a782b5c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-sllkz\" (UID: \"718955ee-bebc-4bab-8658-3e9d8a782b5c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081421 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgzq9\" (UniqueName: \"kubernetes.io/projected/af6b0081-12d7-4b05-b5ff-f2b9d20efa95-kube-api-access-bgzq9\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj65k\" (UID: \"af6b0081-12d7-4b05-b5ff-f2b9d20efa95\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081454 4783 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/718955ee-bebc-4bab-8658-3e9d8a782b5c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-sllkz\" (UID: \"718955ee-bebc-4bab-8658-3e9d8a782b5c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081479 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7vvf\" (UniqueName: \"kubernetes.io/projected/38a97782-7b47-42b2-aea4-6e310de9d476-kube-api-access-b7vvf\") pod \"machine-config-controller-84d6567774-458dz\" (UID: \"38a97782-7b47-42b2-aea4-6e310de9d476\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.081699 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/dbd38476-9515-4ef6-b260-de6a854da0f4-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dnx6l\" (UID: \"dbd38476-9515-4ef6-b260-de6a854da0f4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.082006 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-trusted-ca-bundle\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.082082 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-serving-cert\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.082205 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af6b0081-12d7-4b05-b5ff-f2b9d20efa95-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj65k\" (UID: \"af6b0081-12d7-4b05-b5ff-f2b9d20efa95\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.082494 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-serving-cert\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.082785 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbd38476-9515-4ef6-b260-de6a854da0f4-serving-cert\") pod \"openshift-config-operator-7777fb866f-dnx6l\" (UID: \"dbd38476-9515-4ef6-b260-de6a854da0f4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.082843 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/ba290d22-ab6f-413c-9dfc-3285b83488ed-serving-cert\") pod \"route-controller-manager-6576b87f9c-fvldh\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.083319 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7-machine-approver-tls\") pod \"machine-approver-56656f9798-b6c2n\" (UID: \"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.083394 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.083816 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af6b0081-12d7-4b05-b5ff-f2b9d20efa95-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj65k\" (UID: \"af6b0081-12d7-4b05-b5ff-f2b9d20efa95\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.083949 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.084440 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba290d22-ab6f-413c-9dfc-3285b83488ed-config\") pod \"route-controller-manager-6576b87f9c-fvldh\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.084458 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.084644 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.085017 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-error\") pod 
\"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.085536 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/718955ee-bebc-4bab-8658-3e9d8a782b5c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-sllkz\" (UID: \"718955ee-bebc-4bab-8658-3e9d8a782b5c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.085662 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.086105 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3f9c1072-438c-42a1-b380-8e1aefb0116c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2t6xf\" (UID: \"3f9c1072-438c-42a1-b380-8e1aefb0116c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.086265 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.086562 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2dd54ba0-375c-4230-9d39-3ef77c055e7f-serving-cert\") pod \"console-operator-58897d9998-946mg\" (UID: \"2dd54ba0-375c-4230-9d39-3ef77c055e7f\") " pod="openshift-console-operator/console-operator-58897d9998-946mg" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.091276 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.091694 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-oauth-config\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.092837 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.111424 4783 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.131402 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.151579 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.171517 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183247 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rsmd\" (UniqueName: \"kubernetes.io/projected/6af2276a-3ae6-4c19-b75c-935d765d3890-kube-api-access-9rsmd\") pod \"collect-profiles-29320650-9kh42\" (UID: \"6af2276a-3ae6-4c19-b75c-935d765d3890\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183321 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7vvf\" (UniqueName: \"kubernetes.io/projected/38a97782-7b47-42b2-aea4-6e310de9d476-kube-api-access-b7vvf\") pod \"machine-config-controller-84d6567774-458dz\" (UID: \"38a97782-7b47-42b2-aea4-6e310de9d476\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183367 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/06c6f17e-509e-47c9-a7fa-26cc13ed6012-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-njt7f\" (UID: \"06c6f17e-509e-47c9-a7fa-26cc13ed6012\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-njt7f" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183450 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4957a6e1-74ad-4d72-99c0-a11c24629f13-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-npcpf\" (UID: \"4957a6e1-74ad-4d72-99c0-a11c24629f13\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183478 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6db110f9-7517-4393-ad1a-b621ed8b64f6-signing-cabundle\") pod \"service-ca-9c57cc56f-nlgqp\" (UID: \"6db110f9-7517-4393-ad1a-b621ed8b64f6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183527 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/91c12832-2428-4e1c-b9de-18936239646c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4hvcd\" (UID: \"91c12832-2428-4e1c-b9de-18936239646c\") " pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183549 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-metrics-certs\") pod 
\"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183573 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/977de9a0-2247-494e-b9b2-3f7296950f1f-etcd-service-ca\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183596 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cslhd\" (UniqueName: \"kubernetes.io/projected/91c12832-2428-4e1c-b9de-18936239646c-kube-api-access-cslhd\") pod \"marketplace-operator-79b997595-4hvcd\" (UID: \"91c12832-2428-4e1c-b9de-18936239646c\") " pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183617 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/38a97782-7b47-42b2-aea4-6e310de9d476-proxy-tls\") pod \"machine-config-controller-84d6567774-458dz\" (UID: \"38a97782-7b47-42b2-aea4-6e310de9d476\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183636 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-stats-auth\") pod \"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183659 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txgjc\" (UniqueName: \"kubernetes.io/projected/af39d542-9d45-4afb-8b3c-2d50e9fdfb90-kube-api-access-txgjc\") pod \"dns-operator-744455d44c-k8cdm\" (UID: \"af39d542-9d45-4afb-8b3c-2d50e9fdfb90\") " pod="openshift-dns-operator/dns-operator-744455d44c-k8cdm" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183684 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/977de9a0-2247-494e-b9b2-3f7296950f1f-etcd-ca\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183709 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/38a97782-7b47-42b2-aea4-6e310de9d476-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-458dz\" (UID: \"38a97782-7b47-42b2-aea4-6e310de9d476\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183731 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6af2276a-3ae6-4c19-b75c-935d765d3890-secret-volume\") pod \"collect-profiles-29320650-9kh42\" (UID: \"6af2276a-3ae6-4c19-b75c-935d765d3890\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183763 4783 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-service-ca-bundle\") pod \"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183784 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prvcg\" (UniqueName: \"kubernetes.io/projected/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-kube-api-access-prvcg\") pod \"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183806 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4957a6e1-74ad-4d72-99c0-a11c24629f13-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-npcpf\" (UID: \"4957a6e1-74ad-4d72-99c0-a11c24629f13\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183836 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8s59\" (UniqueName: \"kubernetes.io/projected/6db110f9-7517-4393-ad1a-b621ed8b64f6-kube-api-access-c8s59\") pod \"service-ca-9c57cc56f-nlgqp\" (UID: \"6db110f9-7517-4393-ad1a-b621ed8b64f6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183858 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/af39d542-9d45-4afb-8b3c-2d50e9fdfb90-metrics-tls\") pod \"dns-operator-744455d44c-k8cdm\" (UID: \"af39d542-9d45-4afb-8b3c-2d50e9fdfb90\") " pod="openshift-dns-operator/dns-operator-744455d44c-k8cdm" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183878 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-default-certificate\") pod \"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183899 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4957a6e1-74ad-4d72-99c0-a11c24629f13-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-npcpf\" (UID: \"4957a6e1-74ad-4d72-99c0-a11c24629f13\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183946 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/91c12832-2428-4e1c-b9de-18936239646c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4hvcd\" (UID: \"91c12832-2428-4e1c-b9de-18936239646c\") " pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.183982 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/977de9a0-2247-494e-b9b2-3f7296950f1f-config\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.184005 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/977de9a0-2247-494e-b9b2-3f7296950f1f-serving-cert\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.184027 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/977de9a0-2247-494e-b9b2-3f7296950f1f-etcd-client\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.184050 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clgf2\" (UniqueName: \"kubernetes.io/projected/06c6f17e-509e-47c9-a7fa-26cc13ed6012-kube-api-access-clgf2\") pod \"multus-admission-controller-857f4d67dd-njt7f\" (UID: \"06c6f17e-509e-47c9-a7fa-26cc13ed6012\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-njt7f" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.184069 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6db110f9-7517-4393-ad1a-b621ed8b64f6-signing-key\") pod \"service-ca-9c57cc56f-nlgqp\" (UID: \"6db110f9-7517-4393-ad1a-b621ed8b64f6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.184119 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vdql\" (UniqueName: \"kubernetes.io/projected/977de9a0-2247-494e-b9b2-3f7296950f1f-kube-api-access-2vdql\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.184158 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6af2276a-3ae6-4c19-b75c-935d765d3890-config-volume\") pod \"collect-profiles-29320650-9kh42\" (UID: \"6af2276a-3ae6-4c19-b75c-935d765d3890\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.184675 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/38a97782-7b47-42b2-aea4-6e310de9d476-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-458dz\" (UID: \"38a97782-7b47-42b2-aea4-6e310de9d476\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.184749 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4957a6e1-74ad-4d72-99c0-a11c24629f13-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-npcpf\" (UID: \"4957a6e1-74ad-4d72-99c0-a11c24629f13\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.186082 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4957a6e1-74ad-4d72-99c0-a11c24629f13-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-npcpf\" (UID: \"4957a6e1-74ad-4d72-99c0-a11c24629f13\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.191887 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.211427 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.231416 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.236990 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/af39d542-9d45-4afb-8b3c-2d50e9fdfb90-metrics-tls\") pod \"dns-operator-744455d44c-k8cdm\" (UID: \"af39d542-9d45-4afb-8b3c-2d50e9fdfb90\") " pod="openshift-dns-operator/dns-operator-744455d44c-k8cdm" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.251878 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.272283 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.291932 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.311743 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.344976 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.352068 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.371510 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.391795 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.412081 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.431836 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.452772 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.471761 
4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.491588 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.511666 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.531976 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.551692 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.571399 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.591701 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.611645 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.617523 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-stats-auth\") pod \"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.631859 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.651763 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.658441 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-default-certificate\") pod \"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.671955 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.691041 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.694791 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-service-ca-bundle\") pod \"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.712190 4783 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.731739 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.738030 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-metrics-certs\") pod \"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.752542 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.757659 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/977de9a0-2247-494e-b9b2-3f7296950f1f-serving-cert\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.771488 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.792411 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.796947 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/977de9a0-2247-494e-b9b2-3f7296950f1f-etcd-client\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.811868 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.831300 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.834480 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/977de9a0-2247-494e-b9b2-3f7296950f1f-etcd-ca\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.851705 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.871288 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.875000 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/977de9a0-2247-494e-b9b2-3f7296950f1f-config\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.891400 4783 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.895410 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/977de9a0-2247-494e-b9b2-3f7296950f1f-etcd-service-ca\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.928882 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwmc9\" (UniqueName: \"kubernetes.io/projected/6e505909-2b68-4acc-ad7d-06667458728e-kube-api-access-dwmc9\") pod \"apiserver-7bbb656c7d-6h4jw\" (UID: \"6e505909-2b68-4acc-ad7d-06667458728e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.963056 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csgn9\" (UniqueName: \"kubernetes.io/projected/a2ac21c3-4001-4c91-851f-bcde41192c27-kube-api-access-csgn9\") pod \"apiserver-76f77b778f-mt5f7\" (UID: \"a2ac21c3-4001-4c91-851f-bcde41192c27\") " pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.980285 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hv7zv\" (UniqueName: \"kubernetes.io/projected/c9d62d09-2b01-4420-9dba-700e5b1d63b1-kube-api-access-hv7zv\") pod \"authentication-operator-69f744f599-lbt9h\" (UID: \"c9d62d09-2b01-4420-9dba-700e5b1d63b1\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.987402 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5w9nr\" (UniqueName: \"kubernetes.io/projected/199c8590-e441-428e-99f7-baf1f24b3900-kube-api-access-5w9nr\") pod \"machine-api-operator-5694c8668f-4lcz8\" (UID: \"199c8590-e441-428e-99f7-baf1f24b3900\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8" Sep 30 13:37:21 crc kubenswrapper[4783]: I0930 13:37:21.990329 4783 request.go:700] Waited for 1.016226314s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-apiserver-operator/serviceaccounts/openshift-apiserver-operator/token Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.010870 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.017457 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzgjk\" (UniqueName: \"kubernetes.io/projected/14dbef48-db9e-480f-bd4f-966368ed533b-kube-api-access-pzgjk\") pod \"openshift-apiserver-operator-796bbdcf4f-mwplc\" (UID: \"14dbef48-db9e-480f-bd4f-966368ed533b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.034453 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.047830 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.049530 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/06c6f17e-509e-47c9-a7fa-26cc13ed6012-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-njt7f\" (UID: \"06c6f17e-509e-47c9-a7fa-26cc13ed6012\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-njt7f" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.051784 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.071617 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.074718 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.092975 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.107311 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.112236 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.117418 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.119087 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6af2276a-3ae6-4c19-b75c-935d765d3890-secret-volume\") pod \"collect-profiles-29320650-9kh42\" (UID: \"6af2276a-3ae6-4c19-b75c-935d765d3890\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.132681 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.137937 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6af2276a-3ae6-4c19-b75c-935d765d3890-config-volume\") pod \"collect-profiles-29320650-9kh42\" (UID: \"6af2276a-3ae6-4c19-b75c-935d765d3890\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.178735 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.178990 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Sep 30 13:37:22 crc kubenswrapper[4783]: E0930 13:37:22.184658 4783 secret.go:188] Couldn't get secret openshift-service-ca/signing-key: failed to sync secret cache: timed out waiting for the condition Sep 30 13:37:22 crc kubenswrapper[4783]: E0930 13:37:22.184738 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6db110f9-7517-4393-ad1a-b621ed8b64f6-signing-key podName:6db110f9-7517-4393-ad1a-b621ed8b64f6 nodeName:}" failed. No retries permitted until 2025-09-30 13:37:22.684715909 +0000 UTC m=+142.616182226 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-key" (UniqueName: "kubernetes.io/secret/6db110f9-7517-4393-ad1a-b621ed8b64f6-signing-key") pod "service-ca-9c57cc56f-nlgqp" (UID: "6db110f9-7517-4393-ad1a-b621ed8b64f6") : failed to sync secret cache: timed out waiting for the condition Sep 30 13:37:22 crc kubenswrapper[4783]: E0930 13:37:22.184964 4783 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition Sep 30 13:37:22 crc kubenswrapper[4783]: E0930 13:37:22.185006 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/91c12832-2428-4e1c-b9de-18936239646c-marketplace-trusted-ca podName:91c12832-2428-4e1c-b9de-18936239646c nodeName:}" failed. No retries permitted until 2025-09-30 13:37:22.684996038 +0000 UTC m=+142.616462365 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/91c12832-2428-4e1c-b9de-18936239646c-marketplace-trusted-ca") pod "marketplace-operator-79b997595-4hvcd" (UID: "91c12832-2428-4e1c-b9de-18936239646c") : failed to sync configmap cache: timed out waiting for the condition Sep 30 13:37:22 crc kubenswrapper[4783]: E0930 13:37:22.185025 4783 secret.go:188] Couldn't get secret openshift-marketplace/marketplace-operator-metrics: failed to sync secret cache: timed out waiting for the condition Sep 30 13:37:22 crc kubenswrapper[4783]: E0930 13:37:22.185053 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/91c12832-2428-4e1c-b9de-18936239646c-marketplace-operator-metrics podName:91c12832-2428-4e1c-b9de-18936239646c nodeName:}" failed. No retries permitted until 2025-09-30 13:37:22.685045409 +0000 UTC m=+142.616511726 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-operator-metrics" (UniqueName: "kubernetes.io/secret/91c12832-2428-4e1c-b9de-18936239646c-marketplace-operator-metrics") pod "marketplace-operator-79b997595-4hvcd" (UID: "91c12832-2428-4e1c-b9de-18936239646c") : failed to sync secret cache: timed out waiting for the condition Sep 30 13:37:22 crc kubenswrapper[4783]: E0930 13:37:22.185352 4783 secret.go:188] Couldn't get secret openshift-machine-config-operator/mcc-proxy-tls: failed to sync secret cache: timed out waiting for the condition Sep 30 13:37:22 crc kubenswrapper[4783]: E0930 13:37:22.185483 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38a97782-7b47-42b2-aea4-6e310de9d476-proxy-tls podName:38a97782-7b47-42b2-aea4-6e310de9d476 nodeName:}" failed. No retries permitted until 2025-09-30 13:37:22.685445451 +0000 UTC m=+142.616911799 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/38a97782-7b47-42b2-aea4-6e310de9d476-proxy-tls") pod "machine-config-controller-84d6567774-458dz" (UID: "38a97782-7b47-42b2-aea4-6e310de9d476") : failed to sync secret cache: timed out waiting for the condition Sep 30 13:37:22 crc kubenswrapper[4783]: E0930 13:37:22.185544 4783 configmap.go:193] Couldn't get configMap openshift-service-ca/signing-cabundle: failed to sync configmap cache: timed out waiting for the condition Sep 30 13:37:22 crc kubenswrapper[4783]: E0930 13:37:22.185600 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6db110f9-7517-4393-ad1a-b621ed8b64f6-signing-cabundle podName:6db110f9-7517-4393-ad1a-b621ed8b64f6 nodeName:}" failed. No retries permitted until 2025-09-30 13:37:22.685577256 +0000 UTC m=+142.617043593 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "signing-cabundle" (UniqueName: "kubernetes.io/configmap/6db110f9-7517-4393-ad1a-b621ed8b64f6-signing-cabundle") pod "service-ca-9c57cc56f-nlgqp" (UID: "6db110f9-7517-4393-ad1a-b621ed8b64f6") : failed to sync configmap cache: timed out waiting for the condition Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.191817 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.212315 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.232383 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.253576 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.271272 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.291511 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.314726 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.331913 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.352697 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.372367 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.372953 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw"] Sep 30 13:37:22 crc kubenswrapper[4783]: W0930 13:37:22.378924 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e505909_2b68_4acc_ad7d_06667458728e.slice/crio-c1bf8b829a3de6d4e861bc80fe9f54b61e73ecb58a1e9c625b715850a7dc4a82 WatchSource:0}: Error finding container c1bf8b829a3de6d4e861bc80fe9f54b61e73ecb58a1e9c625b715850a7dc4a82: Status 404 returned error can't find the container with id c1bf8b829a3de6d4e861bc80fe9f54b61e73ecb58a1e9c625b715850a7dc4a82 Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.404979 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.410707 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.431439 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.437612 4783 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-mt5f7"] Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.451924 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.457982 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-lbt9h"] Sep 30 13:37:22 crc kubenswrapper[4783]: W0930 13:37:22.464830 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9d62d09_2b01_4420_9dba_700e5b1d63b1.slice/crio-74595a8fe335f235d37144a220951baf27df102b82a4d557b0ce0c9e34fdf42c WatchSource:0}: Error finding container 74595a8fe335f235d37144a220951baf27df102b82a4d557b0ce0c9e34fdf42c: Status 404 returned error can't find the container with id 74595a8fe335f235d37144a220951baf27df102b82a4d557b0ce0c9e34fdf42c Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.471892 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.491777 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.511390 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.532495 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.551493 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.568850 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h" event={"ID":"c9d62d09-2b01-4420-9dba-700e5b1d63b1","Type":"ContainerStarted","Data":"74595a8fe335f235d37144a220951baf27df102b82a4d557b0ce0c9e34fdf42c"} Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.572823 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.574725 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" event={"ID":"6e505909-2b68-4acc-ad7d-06667458728e","Type":"ContainerStarted","Data":"c1bf8b829a3de6d4e861bc80fe9f54b61e73ecb58a1e9c625b715850a7dc4a82"} Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.576195 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" event={"ID":"a2ac21c3-4001-4c91-851f-bcde41192c27","Type":"ContainerStarted","Data":"6678f1559b2dcaebe5eebbcc77d02274a5248406e61c1d874d147e334042af2b"} Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.595944 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.596862 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc"] Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.598199 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-4lcz8"] Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.612575 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.631534 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.651093 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Sep 30 13:37:22 crc kubenswrapper[4783]: W0930 13:37:22.666378 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod199c8590_e441_428e_99f7_baf1f24b3900.slice/crio-d0494582b255d2527787a7514dd0e427cc06900c23a0bf47bfbfa899ebb7fd0e WatchSource:0}: Error finding container d0494582b255d2527787a7514dd0e427cc06900c23a0bf47bfbfa899ebb7fd0e: Status 404 returned error can't find the container with id d0494582b255d2527787a7514dd0e427cc06900c23a0bf47bfbfa899ebb7fd0e Sep 30 13:37:22 crc kubenswrapper[4783]: W0930 13:37:22.667433 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod14dbef48_db9e_480f_bd4f_966368ed533b.slice/crio-002db7f0a5564c96ee8cec45e83fdf61bb1a8ca52f4794f655950ddec535d3b7 WatchSource:0}: Error finding container 002db7f0a5564c96ee8cec45e83fdf61bb1a8ca52f4794f655950ddec535d3b7: Status 404 returned error can't find the container with id 002db7f0a5564c96ee8cec45e83fdf61bb1a8ca52f4794f655950ddec535d3b7 Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.671433 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.692456 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.707354 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6db110f9-7517-4393-ad1a-b621ed8b64f6-signing-key\") pod \"service-ca-9c57cc56f-nlgqp\" (UID: \"6db110f9-7517-4393-ad1a-b621ed8b64f6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.707771 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6db110f9-7517-4393-ad1a-b621ed8b64f6-signing-cabundle\") pod \"service-ca-9c57cc56f-nlgqp\" (UID: \"6db110f9-7517-4393-ad1a-b621ed8b64f6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.707867 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/91c12832-2428-4e1c-b9de-18936239646c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4hvcd\" (UID: \"91c12832-2428-4e1c-b9de-18936239646c\") " pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" Sep 30 13:37:22 crc kubenswrapper[4783]: 
I0930 13:37:22.707913 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/38a97782-7b47-42b2-aea4-6e310de9d476-proxy-tls\") pod \"machine-config-controller-84d6567774-458dz\" (UID: \"38a97782-7b47-42b2-aea4-6e310de9d476\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.707983 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/91c12832-2428-4e1c-b9de-18936239646c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4hvcd\" (UID: \"91c12832-2428-4e1c-b9de-18936239646c\") " pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.709948 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/91c12832-2428-4e1c-b9de-18936239646c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4hvcd\" (UID: \"91c12832-2428-4e1c-b9de-18936239646c\") " pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.711326 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.711371 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6db110f9-7517-4393-ad1a-b621ed8b64f6-signing-cabundle\") pod \"service-ca-9c57cc56f-nlgqp\" (UID: \"6db110f9-7517-4393-ad1a-b621ed8b64f6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.712438 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/91c12832-2428-4e1c-b9de-18936239646c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4hvcd\" (UID: \"91c12832-2428-4e1c-b9de-18936239646c\") " pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.712791 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6db110f9-7517-4393-ad1a-b621ed8b64f6-signing-key\") pod \"service-ca-9c57cc56f-nlgqp\" (UID: \"6db110f9-7517-4393-ad1a-b621ed8b64f6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.713410 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/38a97782-7b47-42b2-aea4-6e310de9d476-proxy-tls\") pod \"machine-config-controller-84d6567774-458dz\" (UID: \"38a97782-7b47-42b2-aea4-6e310de9d476\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.731613 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.751536 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.771604 4783 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-canary"/"kube-root-ca.crt" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.791501 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.812469 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.837242 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.871812 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.893764 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.911790 4783 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.933835 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.951980 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Sep 30 13:37:22 crc kubenswrapper[4783]: I0930 13:37:22.975998 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.010344 4783 request.go:700] Waited for 1.934682357s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console-operator/serviceaccounts/console-operator/token Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.012714 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zclz\" (UniqueName: \"kubernetes.io/projected/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-kube-api-access-4zclz\") pod \"controller-manager-879f6c89f-vz2w9\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.030475 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bftkt\" (UniqueName: \"kubernetes.io/projected/2dd54ba0-375c-4230-9d39-3ef77c055e7f-kube-api-access-bftkt\") pod \"console-operator-58897d9998-946mg\" (UID: \"2dd54ba0-375c-4230-9d39-3ef77c055e7f\") " pod="openshift-console-operator/console-operator-58897d9998-946mg" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.047362 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw52j\" (UniqueName: \"kubernetes.io/projected/dbd38476-9515-4ef6-b260-de6a854da0f4-kube-api-access-cw52j\") pod \"openshift-config-operator-7777fb866f-dnx6l\" (UID: \"dbd38476-9515-4ef6-b260-de6a854da0f4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.067857 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96xx4\" (UniqueName: \"kubernetes.io/projected/d452858f-ce60-4cf1-83ec-ac72613ca649-kube-api-access-96xx4\") 
pod \"oauth-openshift-558db77b4-7z2r4\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.086107 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgxm4\" (UniqueName: \"kubernetes.io/projected/3f9c1072-438c-42a1-b380-8e1aefb0116c-kube-api-access-sgxm4\") pod \"cluster-samples-operator-665b6dd947-2t6xf\" (UID: \"3f9c1072-438c-42a1-b380-8e1aefb0116c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.098377 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.107302 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9jwp\" (UniqueName: \"kubernetes.io/projected/ba290d22-ab6f-413c-9dfc-3285b83488ed-kube-api-access-k9jwp\") pod \"route-controller-manager-6576b87f9c-fvldh\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.107493 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-946mg" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.121980 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.129716 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2th85\" (UniqueName: \"kubernetes.io/projected/9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7-kube-api-access-2th85\") pod \"machine-approver-56656f9798-b6c2n\" (UID: \"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.132812 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.137933 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.171155 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/718955ee-bebc-4bab-8658-3e9d8a782b5c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-sllkz\" (UID: \"718955ee-bebc-4bab-8658-3e9d8a782b5c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.195248 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgzq9\" (UniqueName: \"kubernetes.io/projected/af6b0081-12d7-4b05-b5ff-f2b9d20efa95-kube-api-access-bgzq9\") pod \"openshift-controller-manager-operator-756b6f6bc6-jj65k\" (UID: \"af6b0081-12d7-4b05-b5ff-f2b9d20efa95\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.209857 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctgrj\" (UniqueName: \"kubernetes.io/projected/ca271c35-f2e4-4c56-a82b-4f47591904f1-kube-api-access-ctgrj\") pod \"console-f9d7485db-x9v6j\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.227759 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62rv2\" (UniqueName: \"kubernetes.io/projected/718955ee-bebc-4bab-8658-3e9d8a782b5c-kube-api-access-62rv2\") pod \"cluster-image-registry-operator-dc59b4c8b-sllkz\" (UID: \"718955ee-bebc-4bab-8658-3e9d8a782b5c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.237208 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvgg2\" (UniqueName: \"kubernetes.io/projected/5e34af40-7563-4772-bd48-cc31a0354c25-kube-api-access-gvgg2\") pod \"downloads-7954f5f757-z67sw\" (UID: \"5e34af40-7563-4772-bd48-cc31a0354c25\") " pod="openshift-console/downloads-7954f5f757-z67sw" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.244813 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rsmd\" (UniqueName: \"kubernetes.io/projected/6af2276a-3ae6-4c19-b75c-935d765d3890-kube-api-access-9rsmd\") pod \"collect-profiles-29320650-9kh42\" (UID: \"6af2276a-3ae6-4c19-b75c-935d765d3890\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.251503 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.273158 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7vvf\" (UniqueName: \"kubernetes.io/projected/38a97782-7b47-42b2-aea4-6e310de9d476-kube-api-access-b7vvf\") pod \"machine-config-controller-84d6567774-458dz\" (UID: \"38a97782-7b47-42b2-aea4-6e310de9d476\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.285172 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cslhd\" (UniqueName: \"kubernetes.io/projected/91c12832-2428-4e1c-b9de-18936239646c-kube-api-access-cslhd\") pod \"marketplace-operator-79b997595-4hvcd\" (UID: \"91c12832-2428-4e1c-b9de-18936239646c\") " pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.295108 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-z67sw" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.307304 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txgjc\" (UniqueName: \"kubernetes.io/projected/af39d542-9d45-4afb-8b3c-2d50e9fdfb90-kube-api-access-txgjc\") pod \"dns-operator-744455d44c-k8cdm\" (UID: \"af39d542-9d45-4afb-8b3c-2d50e9fdfb90\") " pod="openshift-dns-operator/dns-operator-744455d44c-k8cdm" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.312010 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.321985 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7z2r4"] Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.324265 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.325191 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4957a6e1-74ad-4d72-99c0-a11c24629f13-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-npcpf\" (UID: \"4957a6e1-74ad-4d72-99c0-a11c24629f13\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.338811 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf" Sep 30 13:37:23 crc kubenswrapper[4783]: W0930 13:37:23.339137 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9cd8cfc4_ef55_44b7_8fc4_9c268d33d2a7.slice/crio-a82b488f702261561fc75f8591d161a919a15ef23aecef95926a79f0aa420932 WatchSource:0}: Error finding container a82b488f702261561fc75f8591d161a919a15ef23aecef95926a79f0aa420932: Status 404 returned error can't find the container with id a82b488f702261561fc75f8591d161a919a15ef23aecef95926a79f0aa420932 Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.339997 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.358008 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prvcg\" (UniqueName: \"kubernetes.io/projected/d5b11b65-a14d-4f79-9c43-fbb5e93882aa-kube-api-access-prvcg\") pod \"router-default-5444994796-6shjd\" (UID: \"d5b11b65-a14d-4f79-9c43-fbb5e93882aa\") " pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:23 crc kubenswrapper[4783]: W0930 13:37:23.378140 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd452858f_ce60_4cf1_83ec_ac72613ca649.slice/crio-eb73b215693b0f52b4f908ee5e15bb242db1a88df130f285679fd2e9cfb9b566 WatchSource:0}: Error finding container eb73b215693b0f52b4f908ee5e15bb242db1a88df130f285679fd2e9cfb9b566: Status 404 returned error can't find the container with id eb73b215693b0f52b4f908ee5e15bb242db1a88df130f285679fd2e9cfb9b566 Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.384778 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-946mg"] Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.387253 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8s59\" (UniqueName: \"kubernetes.io/projected/6db110f9-7517-4393-ad1a-b621ed8b64f6-kube-api-access-c8s59\") pod \"service-ca-9c57cc56f-nlgqp\" (UID: \"6db110f9-7517-4393-ad1a-b621ed8b64f6\") " pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.402254 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clgf2\" (UniqueName: \"kubernetes.io/projected/06c6f17e-509e-47c9-a7fa-26cc13ed6012-kube-api-access-clgf2\") pod \"multus-admission-controller-857f4d67dd-njt7f\" (UID: \"06c6f17e-509e-47c9-a7fa-26cc13ed6012\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-njt7f" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.408335 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vdql\" (UniqueName: \"kubernetes.io/projected/977de9a0-2247-494e-b9b2-3f7296950f1f-kube-api-access-2vdql\") pod \"etcd-operator-b45778765-jq62m\" (UID: \"977de9a0-2247-494e-b9b2-3f7296950f1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.415276 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.551337 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-458dz"] Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.566289 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.566391 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-6shjd" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.566409 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-njt7f" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.566425 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.566392 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-k8cdm" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.566518 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.566883 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dfcd81db-a627-438a-92b3-f5fcabeff1c4-trusted-ca\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.566927 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.567723 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/dfcd81db-a627-438a-92b3-f5fcabeff1c4-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.568589 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/dfcd81db-a627-438a-92b3-f5fcabeff1c4-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.568647 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-bound-sa-token\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.568665 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcm56\" (UniqueName: \"kubernetes.io/projected/b9eab506-e988-48ab-94d2-32ffd62adb75-kube-api-access-fcm56\") pod \"dns-default-6j7xf\" (UID: \"b9eab506-e988-48ab-94d2-32ffd62adb75\") " pod="openshift-dns/dns-default-6j7xf" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.568681 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llj28\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-kube-api-access-llj28\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 
13:37:23.568700 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/dfcd81db-a627-438a-92b3-f5fcabeff1c4-registry-certificates\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.568715 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b9eab506-e988-48ab-94d2-32ffd62adb75-metrics-tls\") pod \"dns-default-6j7xf\" (UID: \"b9eab506-e988-48ab-94d2-32ffd62adb75\") " pod="openshift-dns/dns-default-6j7xf" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.568795 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9eab506-e988-48ab-94d2-32ffd62adb75-config-volume\") pod \"dns-default-6j7xf\" (UID: \"b9eab506-e988-48ab-94d2-32ffd62adb75\") " pod="openshift-dns/dns-default-6j7xf" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.568825 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.568845 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-registry-tls\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: E0930 13:37:23.569921 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:24.06990703 +0000 UTC m=+144.001373337 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.573098 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.580586 4783 generic.go:334] "Generic (PLEG): container finished" podID="6e505909-2b68-4acc-ad7d-06667458728e" containerID="a0e3a54c99e3301bdbd2629cf220892743451422ad4a530ef8cb1d1a9ed5c0cf" exitCode=0 Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.580645 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" event={"ID":"6e505909-2b68-4acc-ad7d-06667458728e","Type":"ContainerDied","Data":"a0e3a54c99e3301bdbd2629cf220892743451422ad4a530ef8cb1d1a9ed5c0cf"} Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.582431 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h" event={"ID":"c9d62d09-2b01-4420-9dba-700e5b1d63b1","Type":"ContainerStarted","Data":"da2131ec661b574bec276ec706e6dd97114da260aaa3fd3f5b570106d0485a1d"} Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.583578 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8" event={"ID":"199c8590-e441-428e-99f7-baf1f24b3900","Type":"ContainerStarted","Data":"44815c86406ed25a648611cd6677703d7b5617be512d979b31a19bbe4c0380d6"} Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.583609 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8" event={"ID":"199c8590-e441-428e-99f7-baf1f24b3900","Type":"ContainerStarted","Data":"d0494582b255d2527787a7514dd0e427cc06900c23a0bf47bfbfa899ebb7fd0e"} Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.585093 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc" event={"ID":"14dbef48-db9e-480f-bd4f-966368ed533b","Type":"ContainerStarted","Data":"47a73a34e81f710d5f9b31f1b84f6d8175bb37465404599d7eb37a5b88de952c"} Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.585119 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc" event={"ID":"14dbef48-db9e-480f-bd4f-966368ed533b","Type":"ContainerStarted","Data":"002db7f0a5564c96ee8cec45e83fdf61bb1a8ca52f4794f655950ddec535d3b7"} Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.586349 4783 generic.go:334] "Generic (PLEG): container finished" podID="a2ac21c3-4001-4c91-851f-bcde41192c27" containerID="e2a0c267afbf3c26460158a451f06545a9c4b66b5bd8128e90009436608f718e" exitCode=0 Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.586418 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" event={"ID":"a2ac21c3-4001-4c91-851f-bcde41192c27","Type":"ContainerDied","Data":"e2a0c267afbf3c26460158a451f06545a9c4b66b5bd8128e90009436608f718e"} Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.587279 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" event={"ID":"d452858f-ce60-4cf1-83ec-ac72613ca649","Type":"ContainerStarted","Data":"eb73b215693b0f52b4f908ee5e15bb242db1a88df130f285679fd2e9cfb9b566"} Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.590351 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" 
event={"ID":"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7","Type":"ContainerStarted","Data":"a82b488f702261561fc75f8591d161a919a15ef23aecef95926a79f0aa420932"} Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.606547 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vz2w9"] Sep 30 13:37:23 crc kubenswrapper[4783]: W0930 13:37:23.613376 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38a97782_7b47_42b2_aea4_6e310de9d476.slice/crio-d69856910c5a053044d0ed6152310c495c28fd548bdac7b720634bcc8269b7a1 WatchSource:0}: Error finding container d69856910c5a053044d0ed6152310c495c28fd548bdac7b720634bcc8269b7a1: Status 404 returned error can't find the container with id d69856910c5a053044d0ed6152310c495c28fd548bdac7b720634bcc8269b7a1 Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.624566 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l"] Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.630737 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh"] Sep 30 13:37:23 crc kubenswrapper[4783]: W0930 13:37:23.640950 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddbd38476_9515_4ef6_b260_de6a854da0f4.slice/crio-c6fdc3670e02f5f05ed01bc9849060bbf1ed5d683c81eb3a1ae4f5d70655e654 WatchSource:0}: Error finding container c6fdc3670e02f5f05ed01bc9849060bbf1ed5d683c81eb3a1ae4f5d70655e654: Status 404 returned error can't find the container with id c6fdc3670e02f5f05ed01bc9849060bbf1ed5d683c81eb3a1ae4f5d70655e654 Sep 30 13:37:23 crc kubenswrapper[4783]: W0930 13:37:23.646817 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba290d22_ab6f_413c_9dfc_3285b83488ed.slice/crio-074d53e8e746f0b5f36ae2513c0f29c6b74f7d4ceaff880c8862d124411cfa4e WatchSource:0}: Error finding container 074d53e8e746f0b5f36ae2513c0f29c6b74f7d4ceaff880c8862d124411cfa4e: Status 404 returned error can't find the container with id 074d53e8e746f0b5f36ae2513c0f29c6b74f7d4ceaff880c8862d124411cfa4e Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.669884 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:23 crc kubenswrapper[4783]: E0930 13:37:23.673883 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:24.173847698 +0000 UTC m=+144.105314025 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.674126 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8b9a468b-4d63-4019-85aa-970d80f8ed12-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vqmwl\" (UID: \"8b9a468b-4d63-4019-85aa-970d80f8ed12\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.674274 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dfcd81db-a627-438a-92b3-f5fcabeff1c4-trusted-ca\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.674696 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fb8ece4e-276a-4a00-bc65-41977183f112-proxy-tls\") pod \"machine-config-operator-74547568cd-kgm46\" (UID: \"fb8ece4e-276a-4a00-bc65-41977183f112\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.674772 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-registration-dir\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.674792 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-mountpoint-dir\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.675424 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fb8ece4e-276a-4a00-bc65-41977183f112-images\") pod \"machine-config-operator-74547568cd-kgm46\" (UID: \"fb8ece4e-276a-4a00-bc65-41977183f112\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.675769 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9b9dda7-ec4e-4773-b6a1-9f5636370dc5-trusted-ca\") pod \"ingress-operator-5b745b69d9-48n8x\" (UID: \"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.675799 4783 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64947936-ca6e-477f-963c-d9c3413c408a-config\") pod \"kube-apiserver-operator-766d6c64bb-mls7z\" (UID: \"64947936-ca6e-477f-963c-d9c3413c408a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.675893 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dfcd81db-a627-438a-92b3-f5fcabeff1c4-trusted-ca\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.676537 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-plugins-dir\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.676596 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/76bf7889-7f8d-4b56-a600-b57a329cb120-srv-cert\") pod \"olm-operator-6b444d44fb-h7ww6\" (UID: \"76bf7889-7f8d-4b56-a600-b57a329cb120\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.676627 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3039e69c-4d9b-448f-a82d-748101164cfd-apiservice-cert\") pod \"packageserver-d55dfcdfc-fglf6\" (UID: \"3039e69c-4d9b-448f-a82d-748101164cfd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.676668 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/dfcd81db-a627-438a-92b3-f5fcabeff1c4-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.676683 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64947936-ca6e-477f-963c-d9c3413c408a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-mls7z\" (UID: \"64947936-ca6e-477f-963c-d9c3413c408a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.676717 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5245907c-d3d1-4303-b5cc-e99345fa8f55-certs\") pod \"machine-config-server-6l7fj\" (UID: \"5245907c-d3d1-4303-b5cc-e99345fa8f55\") " pod="openshift-machine-config-operator/machine-config-server-6l7fj" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.676781 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/76bf7889-7f8d-4b56-a600-b57a329cb120-profile-collector-cert\") pod \"olm-operator-6b444d44fb-h7ww6\" (UID: \"76bf7889-7f8d-4b56-a600-b57a329cb120\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.676798 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e2cbddf-22f6-4114-ba55-c45128962978-config\") pod \"service-ca-operator-777779d784-wf8bd\" (UID: \"9e2cbddf-22f6-4114-ba55-c45128962978\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.676824 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9b9dda7-ec4e-4773-b6a1-9f5636370dc5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-48n8x\" (UID: \"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.676874 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7160bb17-05ab-419f-95a2-3a02e4f0770a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2ppb5\" (UID: \"7160bb17-05ab-419f-95a2-3a02e4f0770a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.676891 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-csi-data-dir\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.677257 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scst5\" (UniqueName: \"kubernetes.io/projected/fb8ece4e-276a-4a00-bc65-41977183f112-kube-api-access-scst5\") pod \"machine-config-operator-74547568cd-kgm46\" (UID: \"fb8ece4e-276a-4a00-bc65-41977183f112\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.677274 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5jhk\" (UniqueName: \"kubernetes.io/projected/d9b9dda7-ec4e-4773-b6a1-9f5636370dc5-kube-api-access-c5jhk\") pod \"ingress-operator-5b745b69d9-48n8x\" (UID: \"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.677292 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sndvq\" (UniqueName: \"kubernetes.io/projected/3bc346e5-7f91-4375-ac44-6bf5fa06f4fa-kube-api-access-sndvq\") pod \"catalog-operator-68c6474976-8p9wh\" (UID: \"3bc346e5-7f91-4375-ac44-6bf5fa06f4fa\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.677306 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" 
(UniqueName: \"kubernetes.io/secret/3039e69c-4d9b-448f-a82d-748101164cfd-webhook-cert\") pod \"packageserver-d55dfcdfc-fglf6\" (UID: \"3039e69c-4d9b-448f-a82d-748101164cfd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.677320 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvn7q\" (UniqueName: \"kubernetes.io/projected/1f2a2e69-6d09-4819-ac74-66a5806697e0-kube-api-access-wvn7q\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.677490 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/dfcd81db-a627-438a-92b3-f5fcabeff1c4-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.677723 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdmq4\" (UniqueName: \"kubernetes.io/projected/3039e69c-4d9b-448f-a82d-748101164cfd-kube-api-access-hdmq4\") pod \"packageserver-d55dfcdfc-fglf6\" (UID: \"3039e69c-4d9b-448f-a82d-748101164cfd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.677781 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/dfcd81db-a627-438a-92b3-f5fcabeff1c4-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.677879 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bpjz\" (UniqueName: \"kubernetes.io/projected/9e2cbddf-22f6-4114-ba55-c45128962978-kube-api-access-5bpjz\") pod \"service-ca-operator-777779d784-wf8bd\" (UID: \"9e2cbddf-22f6-4114-ba55-c45128962978\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.677919 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49s7w\" (UniqueName: \"kubernetes.io/projected/76bf7889-7f8d-4b56-a600-b57a329cb120-kube-api-access-49s7w\") pod \"olm-operator-6b444d44fb-h7ww6\" (UID: \"76bf7889-7f8d-4b56-a600-b57a329cb120\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.677948 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5245907c-d3d1-4303-b5cc-e99345fa8f55-node-bootstrap-token\") pod \"machine-config-server-6l7fj\" (UID: \"5245907c-d3d1-4303-b5cc-e99345fa8f55\") " pod="openshift-machine-config-operator/machine-config-server-6l7fj" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678003 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/a92be6ca-c361-456c-9ed3-1f35aa4652f5-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-kqqxv\" (UID: \"a92be6ca-c361-456c-9ed3-1f35aa4652f5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678028 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnhtj\" (UniqueName: \"kubernetes.io/projected/a92be6ca-c361-456c-9ed3-1f35aa4652f5-kube-api-access-fnhtj\") pod \"kube-storage-version-migrator-operator-b67b599dd-kqqxv\" (UID: \"a92be6ca-c361-456c-9ed3-1f35aa4652f5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678051 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4tsx\" (UniqueName: \"kubernetes.io/projected/01b72b4c-9858-4ddf-9436-557dbb523e7d-kube-api-access-k4tsx\") pod \"control-plane-machine-set-operator-78cbb6b69f-5fpgz\" (UID: \"01b72b4c-9858-4ddf-9436-557dbb523e7d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5fpgz" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678438 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-bound-sa-token\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678510 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcm56\" (UniqueName: \"kubernetes.io/projected/b9eab506-e988-48ab-94d2-32ffd62adb75-kube-api-access-fcm56\") pod \"dns-default-6j7xf\" (UID: \"b9eab506-e988-48ab-94d2-32ffd62adb75\") " pod="openshift-dns/dns-default-6j7xf" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678536 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e2cbddf-22f6-4114-ba55-c45128962978-serving-cert\") pod \"service-ca-operator-777779d784-wf8bd\" (UID: \"9e2cbddf-22f6-4114-ba55-c45128962978\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678597 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llj28\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-kube-api-access-llj28\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678614 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/dfcd81db-a627-438a-92b3-f5fcabeff1c4-registry-certificates\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678629 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/b9eab506-e988-48ab-94d2-32ffd62adb75-metrics-tls\") pod \"dns-default-6j7xf\" (UID: \"b9eab506-e988-48ab-94d2-32ffd62adb75\") " pod="openshift-dns/dns-default-6j7xf" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678664 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xl6z8\" (UniqueName: \"kubernetes.io/projected/b396639f-2c8b-462d-931f-c2d52fbb9d17-kube-api-access-xl6z8\") pod \"ingress-canary-csqxv\" (UID: \"b396639f-2c8b-462d-931f-c2d52fbb9d17\") " pod="openshift-ingress-canary/ingress-canary-csqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678726 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a92be6ca-c361-456c-9ed3-1f35aa4652f5-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-kqqxv\" (UID: \"a92be6ca-c361-456c-9ed3-1f35aa4652f5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678776 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/3039e69c-4d9b-448f-a82d-748101164cfd-tmpfs\") pod \"packageserver-d55dfcdfc-fglf6\" (UID: \"3039e69c-4d9b-448f-a82d-748101164cfd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678894 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/01b72b4c-9858-4ddf-9436-557dbb523e7d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5fpgz\" (UID: \"01b72b4c-9858-4ddf-9436-557dbb523e7d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5fpgz" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678934 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9eab506-e988-48ab-94d2-32ffd62adb75-config-volume\") pod \"dns-default-6j7xf\" (UID: \"b9eab506-e988-48ab-94d2-32ffd62adb75\") " pod="openshift-dns/dns-default-6j7xf" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.678988 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-socket-dir\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.679044 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.679063 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v4sj\" (UniqueName: \"kubernetes.io/projected/5245907c-d3d1-4303-b5cc-e99345fa8f55-kube-api-access-7v4sj\") pod 
\"machine-config-server-6l7fj\" (UID: \"5245907c-d3d1-4303-b5cc-e99345fa8f55\") " pod="openshift-machine-config-operator/machine-config-server-6l7fj" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.679078 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d9b9dda7-ec4e-4773-b6a1-9f5636370dc5-metrics-tls\") pod \"ingress-operator-5b745b69d9-48n8x\" (UID: \"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.679104 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7160bb17-05ab-419f-95a2-3a02e4f0770a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2ppb5\" (UID: \"7160bb17-05ab-419f-95a2-3a02e4f0770a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.679143 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-registry-tls\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.679544 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64947936-ca6e-477f-963c-d9c3413c408a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-mls7z\" (UID: \"64947936-ca6e-477f-963c-d9c3413c408a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.679602 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3bc346e5-7f91-4375-ac44-6bf5fa06f4fa-profile-collector-cert\") pod \"catalog-operator-68c6474976-8p9wh\" (UID: \"3bc346e5-7f91-4375-ac44-6bf5fa06f4fa\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.679628 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b396639f-2c8b-462d-931f-c2d52fbb9d17-cert\") pod \"ingress-canary-csqxv\" (UID: \"b396639f-2c8b-462d-931f-c2d52fbb9d17\") " pod="openshift-ingress-canary/ingress-canary-csqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.679651 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fb8ece4e-276a-4a00-bc65-41977183f112-auth-proxy-config\") pod \"machine-config-operator-74547568cd-kgm46\" (UID: \"fb8ece4e-276a-4a00-bc65-41977183f112\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.679675 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7160bb17-05ab-419f-95a2-3a02e4f0770a-config\") pod \"kube-controller-manager-operator-78b949d7b-2ppb5\" (UID: 
\"7160bb17-05ab-419f-95a2-3a02e4f0770a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.679744 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8f4h\" (UniqueName: \"kubernetes.io/projected/9c2b300d-113f-4ddc-871f-433b31336f7b-kube-api-access-z8f4h\") pod \"migrator-59844c95c7-qmsj2\" (UID: \"9c2b300d-113f-4ddc-871f-433b31336f7b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qmsj2" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.679784 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjjtg\" (UniqueName: \"kubernetes.io/projected/8b9a468b-4d63-4019-85aa-970d80f8ed12-kube-api-access-mjjtg\") pod \"package-server-manager-789f6589d5-vqmwl\" (UID: \"8b9a468b-4d63-4019-85aa-970d80f8ed12\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.679807 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3bc346e5-7f91-4375-ac44-6bf5fa06f4fa-srv-cert\") pod \"catalog-operator-68c6474976-8p9wh\" (UID: \"3bc346e5-7f91-4375-ac44-6bf5fa06f4fa\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.680085 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/dfcd81db-a627-438a-92b3-f5fcabeff1c4-registry-certificates\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: E0930 13:37:23.680538 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:24.180324403 +0000 UTC m=+144.111790710 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.686570 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-registry-tls\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.686654 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/dfcd81db-a627-438a-92b3-f5fcabeff1c4-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.686749 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9eab506-e988-48ab-94d2-32ffd62adb75-config-volume\") pod \"dns-default-6j7xf\" (UID: \"b9eab506-e988-48ab-94d2-32ffd62adb75\") " pod="openshift-dns/dns-default-6j7xf" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.688184 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b9eab506-e988-48ab-94d2-32ffd62adb75-metrics-tls\") pod \"dns-default-6j7xf\" (UID: \"b9eab506-e988-48ab-94d2-32ffd62adb75\") " pod="openshift-dns/dns-default-6j7xf" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.709652 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42"] Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.716192 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-bound-sa-token\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.726025 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-z67sw"] Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.732827 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcm56\" (UniqueName: \"kubernetes.io/projected/b9eab506-e988-48ab-94d2-32ffd62adb75-kube-api-access-fcm56\") pod \"dns-default-6j7xf\" (UID: \"b9eab506-e988-48ab-94d2-32ffd62adb75\") " pod="openshift-dns/dns-default-6j7xf" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.746311 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llj28\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-kube-api-access-llj28\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 
30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.780961 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781107 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5245907c-d3d1-4303-b5cc-e99345fa8f55-certs\") pod \"machine-config-server-6l7fj\" (UID: \"5245907c-d3d1-4303-b5cc-e99345fa8f55\") " pod="openshift-machine-config-operator/machine-config-server-6l7fj" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781133 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/76bf7889-7f8d-4b56-a600-b57a329cb120-profile-collector-cert\") pod \"olm-operator-6b444d44fb-h7ww6\" (UID: \"76bf7889-7f8d-4b56-a600-b57a329cb120\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781149 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e2cbddf-22f6-4114-ba55-c45128962978-config\") pod \"service-ca-operator-777779d784-wf8bd\" (UID: \"9e2cbddf-22f6-4114-ba55-c45128962978\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781165 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9b9dda7-ec4e-4773-b6a1-9f5636370dc5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-48n8x\" (UID: \"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781183 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7160bb17-05ab-419f-95a2-3a02e4f0770a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2ppb5\" (UID: \"7160bb17-05ab-419f-95a2-3a02e4f0770a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781197 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-csi-data-dir\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781417 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scst5\" (UniqueName: \"kubernetes.io/projected/fb8ece4e-276a-4a00-bc65-41977183f112-kube-api-access-scst5\") pod \"machine-config-operator-74547568cd-kgm46\" (UID: \"fb8ece4e-276a-4a00-bc65-41977183f112\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781438 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5jhk\" (UniqueName: 
\"kubernetes.io/projected/d9b9dda7-ec4e-4773-b6a1-9f5636370dc5-kube-api-access-c5jhk\") pod \"ingress-operator-5b745b69d9-48n8x\" (UID: \"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781454 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sndvq\" (UniqueName: \"kubernetes.io/projected/3bc346e5-7f91-4375-ac44-6bf5fa06f4fa-kube-api-access-sndvq\") pod \"catalog-operator-68c6474976-8p9wh\" (UID: \"3bc346e5-7f91-4375-ac44-6bf5fa06f4fa\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781468 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3039e69c-4d9b-448f-a82d-748101164cfd-webhook-cert\") pod \"packageserver-d55dfcdfc-fglf6\" (UID: \"3039e69c-4d9b-448f-a82d-748101164cfd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781482 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvn7q\" (UniqueName: \"kubernetes.io/projected/1f2a2e69-6d09-4819-ac74-66a5806697e0-kube-api-access-wvn7q\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781506 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdmq4\" (UniqueName: \"kubernetes.io/projected/3039e69c-4d9b-448f-a82d-748101164cfd-kube-api-access-hdmq4\") pod \"packageserver-d55dfcdfc-fglf6\" (UID: \"3039e69c-4d9b-448f-a82d-748101164cfd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781520 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bpjz\" (UniqueName: \"kubernetes.io/projected/9e2cbddf-22f6-4114-ba55-c45128962978-kube-api-access-5bpjz\") pod \"service-ca-operator-777779d784-wf8bd\" (UID: \"9e2cbddf-22f6-4114-ba55-c45128962978\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781534 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49s7w\" (UniqueName: \"kubernetes.io/projected/76bf7889-7f8d-4b56-a600-b57a329cb120-kube-api-access-49s7w\") pod \"olm-operator-6b444d44fb-h7ww6\" (UID: \"76bf7889-7f8d-4b56-a600-b57a329cb120\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781550 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5245907c-d3d1-4303-b5cc-e99345fa8f55-node-bootstrap-token\") pod \"machine-config-server-6l7fj\" (UID: \"5245907c-d3d1-4303-b5cc-e99345fa8f55\") " pod="openshift-machine-config-operator/machine-config-server-6l7fj" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781568 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a92be6ca-c361-456c-9ed3-1f35aa4652f5-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-kqqxv\" (UID: 
\"a92be6ca-c361-456c-9ed3-1f35aa4652f5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781582 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnhtj\" (UniqueName: \"kubernetes.io/projected/a92be6ca-c361-456c-9ed3-1f35aa4652f5-kube-api-access-fnhtj\") pod \"kube-storage-version-migrator-operator-b67b599dd-kqqxv\" (UID: \"a92be6ca-c361-456c-9ed3-1f35aa4652f5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781599 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4tsx\" (UniqueName: \"kubernetes.io/projected/01b72b4c-9858-4ddf-9436-557dbb523e7d-kube-api-access-k4tsx\") pod \"control-plane-machine-set-operator-78cbb6b69f-5fpgz\" (UID: \"01b72b4c-9858-4ddf-9436-557dbb523e7d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5fpgz" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781617 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e2cbddf-22f6-4114-ba55-c45128962978-serving-cert\") pod \"service-ca-operator-777779d784-wf8bd\" (UID: \"9e2cbddf-22f6-4114-ba55-c45128962978\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781645 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xl6z8\" (UniqueName: \"kubernetes.io/projected/b396639f-2c8b-462d-931f-c2d52fbb9d17-kube-api-access-xl6z8\") pod \"ingress-canary-csqxv\" (UID: \"b396639f-2c8b-462d-931f-c2d52fbb9d17\") " pod="openshift-ingress-canary/ingress-canary-csqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781674 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a92be6ca-c361-456c-9ed3-1f35aa4652f5-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-kqqxv\" (UID: \"a92be6ca-c361-456c-9ed3-1f35aa4652f5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781690 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/3039e69c-4d9b-448f-a82d-748101164cfd-tmpfs\") pod \"packageserver-d55dfcdfc-fglf6\" (UID: \"3039e69c-4d9b-448f-a82d-748101164cfd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781710 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/01b72b4c-9858-4ddf-9436-557dbb523e7d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5fpgz\" (UID: \"01b72b4c-9858-4ddf-9436-557dbb523e7d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5fpgz" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781727 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-socket-dir\") pod 
\"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781753 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v4sj\" (UniqueName: \"kubernetes.io/projected/5245907c-d3d1-4303-b5cc-e99345fa8f55-kube-api-access-7v4sj\") pod \"machine-config-server-6l7fj\" (UID: \"5245907c-d3d1-4303-b5cc-e99345fa8f55\") " pod="openshift-machine-config-operator/machine-config-server-6l7fj" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781767 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d9b9dda7-ec4e-4773-b6a1-9f5636370dc5-metrics-tls\") pod \"ingress-operator-5b745b69d9-48n8x\" (UID: \"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781782 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7160bb17-05ab-419f-95a2-3a02e4f0770a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2ppb5\" (UID: \"7160bb17-05ab-419f-95a2-3a02e4f0770a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781799 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64947936-ca6e-477f-963c-d9c3413c408a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-mls7z\" (UID: \"64947936-ca6e-477f-963c-d9c3413c408a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781820 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3bc346e5-7f91-4375-ac44-6bf5fa06f4fa-profile-collector-cert\") pod \"catalog-operator-68c6474976-8p9wh\" (UID: \"3bc346e5-7f91-4375-ac44-6bf5fa06f4fa\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781835 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b396639f-2c8b-462d-931f-c2d52fbb9d17-cert\") pod \"ingress-canary-csqxv\" (UID: \"b396639f-2c8b-462d-931f-c2d52fbb9d17\") " pod="openshift-ingress-canary/ingress-canary-csqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781849 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fb8ece4e-276a-4a00-bc65-41977183f112-auth-proxy-config\") pod \"machine-config-operator-74547568cd-kgm46\" (UID: \"fb8ece4e-276a-4a00-bc65-41977183f112\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781863 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7160bb17-05ab-419f-95a2-3a02e4f0770a-config\") pod \"kube-controller-manager-operator-78b949d7b-2ppb5\" (UID: \"7160bb17-05ab-419f-95a2-3a02e4f0770a\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781881 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8f4h\" (UniqueName: \"kubernetes.io/projected/9c2b300d-113f-4ddc-871f-433b31336f7b-kube-api-access-z8f4h\") pod \"migrator-59844c95c7-qmsj2\" (UID: \"9c2b300d-113f-4ddc-871f-433b31336f7b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qmsj2" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781898 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjjtg\" (UniqueName: \"kubernetes.io/projected/8b9a468b-4d63-4019-85aa-970d80f8ed12-kube-api-access-mjjtg\") pod \"package-server-manager-789f6589d5-vqmwl\" (UID: \"8b9a468b-4d63-4019-85aa-970d80f8ed12\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781912 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3bc346e5-7f91-4375-ac44-6bf5fa06f4fa-srv-cert\") pod \"catalog-operator-68c6474976-8p9wh\" (UID: \"3bc346e5-7f91-4375-ac44-6bf5fa06f4fa\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781929 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8b9a468b-4d63-4019-85aa-970d80f8ed12-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vqmwl\" (UID: \"8b9a468b-4d63-4019-85aa-970d80f8ed12\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781946 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fb8ece4e-276a-4a00-bc65-41977183f112-proxy-tls\") pod \"machine-config-operator-74547568cd-kgm46\" (UID: \"fb8ece4e-276a-4a00-bc65-41977183f112\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781960 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-registration-dir\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.781976 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-mountpoint-dir\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.782002 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fb8ece4e-276a-4a00-bc65-41977183f112-images\") pod \"machine-config-operator-74547568cd-kgm46\" (UID: \"fb8ece4e-276a-4a00-bc65-41977183f112\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 
13:37:23.782022 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9b9dda7-ec4e-4773-b6a1-9f5636370dc5-trusted-ca\") pod \"ingress-operator-5b745b69d9-48n8x\" (UID: \"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.782041 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64947936-ca6e-477f-963c-d9c3413c408a-config\") pod \"kube-apiserver-operator-766d6c64bb-mls7z\" (UID: \"64947936-ca6e-477f-963c-d9c3413c408a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.782061 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-plugins-dir\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.782077 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/76bf7889-7f8d-4b56-a600-b57a329cb120-srv-cert\") pod \"olm-operator-6b444d44fb-h7ww6\" (UID: \"76bf7889-7f8d-4b56-a600-b57a329cb120\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.782092 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3039e69c-4d9b-448f-a82d-748101164cfd-apiservice-cert\") pod \"packageserver-d55dfcdfc-fglf6\" (UID: \"3039e69c-4d9b-448f-a82d-748101164cfd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.782108 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64947936-ca6e-477f-963c-d9c3413c408a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-mls7z\" (UID: \"64947936-ca6e-477f-963c-d9c3413c408a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z" Sep 30 13:37:23 crc kubenswrapper[4783]: E0930 13:37:23.782242 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:24.282206746 +0000 UTC m=+144.213673053 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.787048 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3039e69c-4d9b-448f-a82d-748101164cfd-webhook-cert\") pod \"packageserver-d55dfcdfc-fglf6\" (UID: \"3039e69c-4d9b-448f-a82d-748101164cfd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.789302 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a92be6ca-c361-456c-9ed3-1f35aa4652f5-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-kqqxv\" (UID: \"a92be6ca-c361-456c-9ed3-1f35aa4652f5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.789427 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-csi-data-dir\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.790017 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e2cbddf-22f6-4114-ba55-c45128962978-config\") pod \"service-ca-operator-777779d784-wf8bd\" (UID: \"9e2cbddf-22f6-4114-ba55-c45128962978\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.790810 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5245907c-d3d1-4303-b5cc-e99345fa8f55-certs\") pod \"machine-config-server-6l7fj\" (UID: \"5245907c-d3d1-4303-b5cc-e99345fa8f55\") " pod="openshift-machine-config-operator/machine-config-server-6l7fj" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.791559 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-mountpoint-dir\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.791822 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/3039e69c-4d9b-448f-a82d-748101164cfd-tmpfs\") pod \"packageserver-d55dfcdfc-fglf6\" (UID: \"3039e69c-4d9b-448f-a82d-748101164cfd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.792500 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e2cbddf-22f6-4114-ba55-c45128962978-serving-cert\") pod \"service-ca-operator-777779d784-wf8bd\" (UID: 
\"9e2cbddf-22f6-4114-ba55-c45128962978\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.792567 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-plugins-dir\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.792721 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64947936-ca6e-477f-963c-d9c3413c408a-config\") pod \"kube-apiserver-operator-766d6c64bb-mls7z\" (UID: \"64947936-ca6e-477f-963c-d9c3413c408a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.792974 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64947936-ca6e-477f-963c-d9c3413c408a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-mls7z\" (UID: \"64947936-ca6e-477f-963c-d9c3413c408a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.793075 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf"] Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.793104 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-x9v6j"] Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.793165 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-registration-dir\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.793196 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fb8ece4e-276a-4a00-bc65-41977183f112-images\") pod \"machine-config-operator-74547568cd-kgm46\" (UID: \"fb8ece4e-276a-4a00-bc65-41977183f112\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.794393 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/1f2a2e69-6d09-4819-ac74-66a5806697e0-socket-dir\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.794853 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5245907c-d3d1-4303-b5cc-e99345fa8f55-node-bootstrap-token\") pod \"machine-config-server-6l7fj\" (UID: \"5245907c-d3d1-4303-b5cc-e99345fa8f55\") " pod="openshift-machine-config-operator/machine-config-server-6l7fj" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.813265 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fb8ece4e-276a-4a00-bc65-41977183f112-auth-proxy-config\") pod 
\"machine-config-operator-74547568cd-kgm46\" (UID: \"fb8ece4e-276a-4a00-bc65-41977183f112\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.814630 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a92be6ca-c361-456c-9ed3-1f35aa4652f5-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-kqqxv\" (UID: \"a92be6ca-c361-456c-9ed3-1f35aa4652f5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.816458 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9b9dda7-ec4e-4773-b6a1-9f5636370dc5-trusted-ca\") pod \"ingress-operator-5b745b69d9-48n8x\" (UID: \"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.817492 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7160bb17-05ab-419f-95a2-3a02e4f0770a-config\") pod \"kube-controller-manager-operator-78b949d7b-2ppb5\" (UID: \"7160bb17-05ab-419f-95a2-3a02e4f0770a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.817541 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7160bb17-05ab-419f-95a2-3a02e4f0770a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2ppb5\" (UID: \"7160bb17-05ab-419f-95a2-3a02e4f0770a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.819489 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b396639f-2c8b-462d-931f-c2d52fbb9d17-cert\") pod \"ingress-canary-csqxv\" (UID: \"b396639f-2c8b-462d-931f-c2d52fbb9d17\") " pod="openshift-ingress-canary/ingress-canary-csqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.819543 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3039e69c-4d9b-448f-a82d-748101164cfd-apiservice-cert\") pod \"packageserver-d55dfcdfc-fglf6\" (UID: \"3039e69c-4d9b-448f-a82d-748101164cfd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.819546 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/76bf7889-7f8d-4b56-a600-b57a329cb120-srv-cert\") pod \"olm-operator-6b444d44fb-h7ww6\" (UID: \"76bf7889-7f8d-4b56-a600-b57a329cb120\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.819484 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/76bf7889-7f8d-4b56-a600-b57a329cb120-profile-collector-cert\") pod \"olm-operator-6b444d44fb-h7ww6\" (UID: \"76bf7889-7f8d-4b56-a600-b57a329cb120\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" Sep 30 13:37:23 crc kubenswrapper[4783]: 
I0930 13:37:23.819559 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/01b72b4c-9858-4ddf-9436-557dbb523e7d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5fpgz\" (UID: \"01b72b4c-9858-4ddf-9436-557dbb523e7d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5fpgz" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.822770 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d9b9dda7-ec4e-4773-b6a1-9f5636370dc5-metrics-tls\") pod \"ingress-operator-5b745b69d9-48n8x\" (UID: \"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.822997 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8b9a468b-4d63-4019-85aa-970d80f8ed12-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vqmwl\" (UID: \"8b9a468b-4d63-4019-85aa-970d80f8ed12\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.826955 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xl6z8\" (UniqueName: \"kubernetes.io/projected/b396639f-2c8b-462d-931f-c2d52fbb9d17-kube-api-access-xl6z8\") pod \"ingress-canary-csqxv\" (UID: \"b396639f-2c8b-462d-931f-c2d52fbb9d17\") " pod="openshift-ingress-canary/ingress-canary-csqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.862848 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fb8ece4e-276a-4a00-bc65-41977183f112-proxy-tls\") pod \"machine-config-operator-74547568cd-kgm46\" (UID: \"fb8ece4e-276a-4a00-bc65-41977183f112\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.864489 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3bc346e5-7f91-4375-ac44-6bf5fa06f4fa-srv-cert\") pod \"catalog-operator-68c6474976-8p9wh\" (UID: \"3bc346e5-7f91-4375-ac44-6bf5fa06f4fa\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.864599 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3bc346e5-7f91-4375-ac44-6bf5fa06f4fa-profile-collector-cert\") pod \"catalog-operator-68c6474976-8p9wh\" (UID: \"3bc346e5-7f91-4375-ac44-6bf5fa06f4fa\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.866940 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvn7q\" (UniqueName: \"kubernetes.io/projected/1f2a2e69-6d09-4819-ac74-66a5806697e0-kube-api-access-wvn7q\") pod \"csi-hostpathplugin-4kmnp\" (UID: \"1f2a2e69-6d09-4819-ac74-66a5806697e0\") " pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.883128 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: E0930 13:37:23.883627 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:24.383614994 +0000 UTC m=+144.315081291 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.884369 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnhtj\" (UniqueName: \"kubernetes.io/projected/a92be6ca-c361-456c-9ed3-1f35aa4652f5-kube-api-access-fnhtj\") pod \"kube-storage-version-migrator-operator-b67b599dd-kqqxv\" (UID: \"a92be6ca-c361-456c-9ed3-1f35aa4652f5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.900961 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdmq4\" (UniqueName: \"kubernetes.io/projected/3039e69c-4d9b-448f-a82d-748101164cfd-kube-api-access-hdmq4\") pod \"packageserver-d55dfcdfc-fglf6\" (UID: \"3039e69c-4d9b-448f-a82d-748101164cfd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.913827 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4tsx\" (UniqueName: \"kubernetes.io/projected/01b72b4c-9858-4ddf-9436-557dbb523e7d-kube-api-access-k4tsx\") pod \"control-plane-machine-set-operator-78cbb6b69f-5fpgz\" (UID: \"01b72b4c-9858-4ddf-9436-557dbb523e7d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5fpgz" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.919551 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5fpgz" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.930326 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9b9dda7-ec4e-4773-b6a1-9f5636370dc5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-48n8x\" (UID: \"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.934576 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-csqxv" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.940319 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-6j7xf" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.944265 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bpjz\" (UniqueName: \"kubernetes.io/projected/9e2cbddf-22f6-4114-ba55-c45128962978-kube-api-access-5bpjz\") pod \"service-ca-operator-777779d784-wf8bd\" (UID: \"9e2cbddf-22f6-4114-ba55-c45128962978\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd" Sep 30 13:37:23 crc kubenswrapper[4783]: W0930 13:37:23.947758 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6af2276a_3ae6_4c19_b75c_935d765d3890.slice/crio-1b91a7cef9f41b8f9af3b0260d5afd300a082e66b8ab49c1bf98f916395b8024 WatchSource:0}: Error finding container 1b91a7cef9f41b8f9af3b0260d5afd300a082e66b8ab49c1bf98f916395b8024: Status 404 returned error can't find the container with id 1b91a7cef9f41b8f9af3b0260d5afd300a082e66b8ab49c1bf98f916395b8024 Sep 30 13:37:23 crc kubenswrapper[4783]: W0930 13:37:23.952258 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5e34af40_7563_4772_bd48_cc31a0354c25.slice/crio-210a3a36de37e9d3f650990757772dec398d1ea57a88691845031712fc882d96 WatchSource:0}: Error finding container 210a3a36de37e9d3f650990757772dec398d1ea57a88691845031712fc882d96: Status 404 returned error can't find the container with id 210a3a36de37e9d3f650990757772dec398d1ea57a88691845031712fc882d96 Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.973126 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.977938 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5jhk\" (UniqueName: \"kubernetes.io/projected/d9b9dda7-ec4e-4773-b6a1-9f5636370dc5-kube-api-access-c5jhk\") pod \"ingress-operator-5b745b69d9-48n8x\" (UID: \"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.984098 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:23 crc kubenswrapper[4783]: E0930 13:37:23.984614 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:24.484589018 +0000 UTC m=+144.416055325 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.984696 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:23 crc kubenswrapper[4783]: E0930 13:37:23.985298 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:24.485283019 +0000 UTC m=+144.416756586 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:23 crc kubenswrapper[4783]: W0930 13:37:23.991401 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd5b11b65_a14d_4f79_9c43_fbb5e93882aa.slice/crio-dd5f1e08e37052695811eb4f8ac8587146dc39fd8c200713a6ccbb352ae110dd WatchSource:0}: Error finding container dd5f1e08e37052695811eb4f8ac8587146dc39fd8c200713a6ccbb352ae110dd: Status 404 returned error can't find the container with id dd5f1e08e37052695811eb4f8ac8587146dc39fd8c200713a6ccbb352ae110dd Sep 30 13:37:23 crc kubenswrapper[4783]: I0930 13:37:23.992028 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49s7w\" (UniqueName: \"kubernetes.io/projected/76bf7889-7f8d-4b56-a600-b57a329cb120-kube-api-access-49s7w\") pod \"olm-operator-6b444d44fb-h7ww6\" (UID: \"76bf7889-7f8d-4b56-a600-b57a329cb120\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.005807 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scst5\" (UniqueName: \"kubernetes.io/projected/fb8ece4e-276a-4a00-bc65-41977183f112-kube-api-access-scst5\") pod \"machine-config-operator-74547568cd-kgm46\" (UID: \"fb8ece4e-276a-4a00-bc65-41977183f112\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.029308 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8f4h\" (UniqueName: \"kubernetes.io/projected/9c2b300d-113f-4ddc-871f-433b31336f7b-kube-api-access-z8f4h\") pod \"migrator-59844c95c7-qmsj2\" (UID: \"9c2b300d-113f-4ddc-871f-433b31336f7b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qmsj2" Sep 30 
13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.034932 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4hvcd"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.045046 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sndvq\" (UniqueName: \"kubernetes.io/projected/3bc346e5-7f91-4375-ac44-6bf5fa06f4fa-kube-api-access-sndvq\") pod \"catalog-operator-68c6474976-8p9wh\" (UID: \"3bc346e5-7f91-4375-ac44-6bf5fa06f4fa\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.067706 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjjtg\" (UniqueName: \"kubernetes.io/projected/8b9a468b-4d63-4019-85aa-970d80f8ed12-kube-api-access-mjjtg\") pod \"package-server-manager-789f6589d5-vqmwl\" (UID: \"8b9a468b-4d63-4019-85aa-970d80f8ed12\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.077946 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qmsj2" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.086083 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:24 crc kubenswrapper[4783]: E0930 13:37:24.086471 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:24.586453079 +0000 UTC m=+144.517919386 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.087858 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64947936-ca6e-477f-963c-d9c3413c408a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-mls7z\" (UID: \"64947936-ca6e-477f-963c-d9c3413c408a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.107548 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.111160 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v4sj\" (UniqueName: \"kubernetes.io/projected/5245907c-d3d1-4303-b5cc-e99345fa8f55-kube-api-access-7v4sj\") pod \"machine-config-server-6l7fj\" (UID: \"5245907c-d3d1-4303-b5cc-e99345fa8f55\") " pod="openshift-machine-config-operator/machine-config-server-6l7fj" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.116347 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.120519 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.135541 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nlgqp"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.144969 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7160bb17-05ab-419f-95a2-3a02e4f0770a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2ppb5\" (UID: \"7160bb17-05ab-419f-95a2-3a02e4f0770a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.147434 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jq62m"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.163686 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.167953 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.176242 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-njt7f"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.180829 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.190976 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.192379 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:24 crc kubenswrapper[4783]: E0930 13:37:24.192691 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-09-30 13:37:24.69267986 +0000 UTC m=+144.624146167 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.193206 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.196843 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.204802 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.212369 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.231312 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.270521 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-k8cdm"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.271672 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6l7fj" Sep 30 13:37:24 crc kubenswrapper[4783]: W0930 13:37:24.274871 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod977de9a0_2247_494e_b9b2_3f7296950f1f.slice/crio-511aacc0fbd1165d075a6ec67c6b08aa8b4e07bdefdae4c2298658f3b9b3305d WatchSource:0}: Error finding container 511aacc0fbd1165d075a6ec67c6b08aa8b4e07bdefdae4c2298658f3b9b3305d: Status 404 returned error can't find the container with id 511aacc0fbd1165d075a6ec67c6b08aa8b4e07bdefdae4c2298658f3b9b3305d Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.293694 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:24 crc kubenswrapper[4783]: E0930 13:37:24.293910 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:24.793882251 +0000 UTC m=+144.725348558 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.293968 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:24 crc kubenswrapper[4783]: E0930 13:37:24.294345 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:24.794333576 +0000 UTC m=+144.725799883 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:24 crc kubenswrapper[4783]: W0930 13:37:24.337615 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4957a6e1_74ad_4d72_99c0_a11c24629f13.slice/crio-bf14930e624ab8e08cb2bf7da61f82f56074ee171ae9f982b6d5255c8ab3ee07 WatchSource:0}: Error finding container bf14930e624ab8e08cb2bf7da61f82f56074ee171ae9f982b6d5255c8ab3ee07: Status 404 returned error can't find the container with id bf14930e624ab8e08cb2bf7da61f82f56074ee171ae9f982b6d5255c8ab3ee07 Sep 30 13:37:24 crc kubenswrapper[4783]: W0930 13:37:24.338293 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06c6f17e_509e_47c9_a7fa_26cc13ed6012.slice/crio-d3c9391c03fefd46b4b339d3fca6f3ba857885718c816517f06eaef7f27e913f WatchSource:0}: Error finding container d3c9391c03fefd46b4b339d3fca6f3ba857885718c816517f06eaef7f27e913f: Status 404 returned error can't find the container with id d3c9391c03fefd46b4b339d3fca6f3ba857885718c816517f06eaef7f27e913f Sep 30 13:37:24 crc kubenswrapper[4783]: W0930 13:37:24.351280 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf6b0081_12d7_4b05_b5ff_f2b9d20efa95.slice/crio-f5ac3caf45bbd85591af968796884a9012a32f404f96403d5cdaf4e1b356491a WatchSource:0}: Error finding container f5ac3caf45bbd85591af968796884a9012a32f404f96403d5cdaf4e1b356491a: Status 404 returned error can't find the container with id f5ac3caf45bbd85591af968796884a9012a32f404f96403d5cdaf4e1b356491a Sep 30 13:37:24 crc kubenswrapper[4783]: W0930 13:37:24.365459 4783 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod718955ee_bebc_4bab_8658_3e9d8a782b5c.slice/crio-3f0fed8e8b9a25f2724ee823b541daccce60c4865774654bfe431cc41dad3ac5 WatchSource:0}: Error finding container 3f0fed8e8b9a25f2724ee823b541daccce60c4865774654bfe431cc41dad3ac5: Status 404 returned error can't find the container with id 3f0fed8e8b9a25f2724ee823b541daccce60c4865774654bfe431cc41dad3ac5 Sep 30 13:37:24 crc kubenswrapper[4783]: W0930 13:37:24.367778 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf39d542_9d45_4afb_8b3c_2d50e9fdfb90.slice/crio-68be4dc2600cd4bc027724d826369ad69214c45418c011f0b5a76e6fdff822c1 WatchSource:0}: Error finding container 68be4dc2600cd4bc027724d826369ad69214c45418c011f0b5a76e6fdff822c1: Status 404 returned error can't find the container with id 68be4dc2600cd4bc027724d826369ad69214c45418c011f0b5a76e6fdff822c1 Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.369054 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5" Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.394857 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:24 crc kubenswrapper[4783]: E0930 13:37:24.395063 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:24.89501807 +0000 UTC m=+144.826484387 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.395181 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:24 crc kubenswrapper[4783]: E0930 13:37:24.395548 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:24.895537437 +0000 UTC m=+144.827003794 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.439829 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-csqxv"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.469271 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5fpgz"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.497677 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:24 crc kubenswrapper[4783]: E0930 13:37:24.498075 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:24.998058779 +0000 UTC m=+144.929525086 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.571601 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.600144 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:24 crc kubenswrapper[4783]: E0930 13:37:24.600468 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:25.100457479 +0000 UTC m=+145.031923786 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.605347 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz" event={"ID":"718955ee-bebc-4bab-8658-3e9d8a782b5c","Type":"ContainerStarted","Data":"3f0fed8e8b9a25f2724ee823b541daccce60c4865774654bfe431cc41dad3ac5"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.608813 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" event={"ID":"ba290d22-ab6f-413c-9dfc-3285b83488ed","Type":"ContainerStarted","Data":"074d53e8e746f0b5f36ae2513c0f29c6b74f7d4ceaff880c8862d124411cfa4e"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.610427 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" event={"ID":"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7","Type":"ContainerStarted","Data":"08683b13cb43b24cdb07f30c254490c0ac2a45821be1040ee0d7e7c7533ea043"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.611507 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-6shjd" event={"ID":"d5b11b65-a14d-4f79-9c43-fbb5e93882aa","Type":"ContainerStarted","Data":"dd5f1e08e37052695811eb4f8ac8587146dc39fd8c200713a6ccbb352ae110dd"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.613544 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf" event={"ID":"4957a6e1-74ad-4d72-99c0-a11c24629f13","Type":"ContainerStarted","Data":"bf14930e624ab8e08cb2bf7da61f82f56074ee171ae9f982b6d5255c8ab3ee07"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.615333 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k" event={"ID":"af6b0081-12d7-4b05-b5ff-f2b9d20efa95","Type":"ContainerStarted","Data":"f5ac3caf45bbd85591af968796884a9012a32f404f96403d5cdaf4e1b356491a"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.616129 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" event={"ID":"38a97782-7b47-42b2-aea4-6e310de9d476","Type":"ContainerStarted","Data":"d69856910c5a053044d0ed6152310c495c28fd548bdac7b720634bcc8269b7a1"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.617172 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-njt7f" event={"ID":"06c6f17e-509e-47c9-a7fa-26cc13ed6012","Type":"ContainerStarted","Data":"d3c9391c03fefd46b4b339d3fca6f3ba857885718c816517f06eaef7f27e913f"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.617852 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x9v6j" 
event={"ID":"ca271c35-f2e4-4c56-a82b-4f47591904f1","Type":"ContainerStarted","Data":"a0a9b7681f0ef4f26c409acee357008b2f3ec5fd99a7bed7ad97536b1318350d"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.619327 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8" event={"ID":"199c8590-e441-428e-99f7-baf1f24b3900","Type":"ContainerStarted","Data":"43df9aae6cb41b202316a3a29844f514ca0f9234df8b4b2568bc339140f79a74"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.621187 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-946mg" event={"ID":"2dd54ba0-375c-4230-9d39-3ef77c055e7f","Type":"ContainerStarted","Data":"a69e9ddf5c4f4df983138ceffcdba496752349a42e45d597726dc41e1ba0a2d7"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.622424 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" event={"ID":"977de9a0-2247-494e-b9b2-3f7296950f1f","Type":"ContainerStarted","Data":"511aacc0fbd1165d075a6ec67c6b08aa8b4e07bdefdae4c2298658f3b9b3305d"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.623180 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-z67sw" event={"ID":"5e34af40-7563-4772-bd48-cc31a0354c25","Type":"ContainerStarted","Data":"210a3a36de37e9d3f650990757772dec398d1ea57a88691845031712fc882d96"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.624121 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-k8cdm" event={"ID":"af39d542-9d45-4afb-8b3c-2d50e9fdfb90","Type":"ContainerStarted","Data":"68be4dc2600cd4bc027724d826369ad69214c45418c011f0b5a76e6fdff822c1"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.624884 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" event={"ID":"6db110f9-7517-4393-ad1a-b621ed8b64f6","Type":"ContainerStarted","Data":"c553b9c4019496d5ac2ca0d3b19e2159eacddc1e9d3bf2e103078fca769fe63e"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.625641 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" event={"ID":"91c12832-2428-4e1c-b9de-18936239646c","Type":"ContainerStarted","Data":"e70391c3e887db36220243fa83636c76aeddf103fb41e90a0b94dcecf342e889"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.626575 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" event={"ID":"6af2276a-3ae6-4c19-b75c-935d765d3890","Type":"ContainerStarted","Data":"1b91a7cef9f41b8f9af3b0260d5afd300a082e66b8ab49c1bf98f916395b8024"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.627438 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" event={"ID":"96c43e6b-e8b0-4282-8882-cafa0a59c2d1","Type":"ContainerStarted","Data":"c1d5e19b0552471254c4262a41bc0e8331f3bf684cf3cb0a91d87169463868c1"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.628277 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" event={"ID":"dbd38476-9515-4ef6-b260-de6a854da0f4","Type":"ContainerStarted","Data":"c6fdc3670e02f5f05ed01bc9849060bbf1ed5d683c81eb3a1ae4f5d70655e654"} Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 
13:37:24.690287 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qmsj2"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.692941 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-4kmnp"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.694689 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-6j7xf"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.696168 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.701402 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:24 crc kubenswrapper[4783]: E0930 13:37:24.701601 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:25.201582208 +0000 UTC m=+145.133048515 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.701833 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:24 crc kubenswrapper[4783]: E0930 13:37:24.703521 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:25.203502298 +0000 UTC m=+145.134968615 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.711574 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.713004 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z"] Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.803275 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:24 crc kubenswrapper[4783]: E0930 13:37:24.803422 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:25.303401338 +0000 UTC m=+145.234867655 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.803526 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:24 crc kubenswrapper[4783]: E0930 13:37:24.803859 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:25.303847972 +0000 UTC m=+145.235314279 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.849459 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:37:24 crc kubenswrapper[4783]: W0930 13:37:24.852499 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb396639f_2c8b_462d_931f_c2d52fbb9d17.slice/crio-4329f5e48467d7d06c0ed921bdaba14053e06387ad7db95d5e42acb9ceb3678d WatchSource:0}: Error finding container 4329f5e48467d7d06c0ed921bdaba14053e06387ad7db95d5e42acb9ceb3678d: Status 404 returned error can't find the container with id 4329f5e48467d7d06c0ed921bdaba14053e06387ad7db95d5e42acb9ceb3678d Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.904695 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:24 crc kubenswrapper[4783]: E0930 13:37:24.904913 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:25.404892458 +0000 UTC m=+145.336358765 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:24 crc kubenswrapper[4783]: I0930 13:37:24.905867 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:24 crc kubenswrapper[4783]: E0930 13:37:24.906678 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:25.406651164 +0000 UTC m=+145.338117511 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.007074 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:25 crc kubenswrapper[4783]: E0930 13:37:25.007518 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:25.507502164 +0000 UTC m=+145.438968471 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:25 crc kubenswrapper[4783]: W0930 13:37:25.032214 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod01b72b4c_9858_4ddf_9436_557dbb523e7d.slice/crio-4ef39c12618b05e57ec883d42535b8a4d4b80fb5a48951418955869b9f8a393f WatchSource:0}: Error finding container 4ef39c12618b05e57ec883d42535b8a4d4b80fb5a48951418955869b9f8a393f: Status 404 returned error can't find the container with id 4ef39c12618b05e57ec883d42535b8a4d4b80fb5a48951418955869b9f8a393f
Sep 30 13:37:25 crc kubenswrapper[4783]: W0930 13:37:25.044315 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda92be6ca_c361_456c_9ed3_1f35aa4652f5.slice/crio-bb49c06335bfef48dbc96ad76cf451e482aee51096c00c9d66fdd9965e426d75 WatchSource:0}: Error finding container bb49c06335bfef48dbc96ad76cf451e482aee51096c00c9d66fdd9965e426d75: Status 404 returned error can't find the container with id bb49c06335bfef48dbc96ad76cf451e482aee51096c00c9d66fdd9965e426d75
Sep 30 13:37:25 crc kubenswrapper[4783]: W0930 13:37:25.050581 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f2a2e69_6d09_4819_ac74_66a5806697e0.slice/crio-1fd4f496bc1b50b10698a35912c5d68fdccd3d5ccbd8cd84ad6f91600f33c182 WatchSource:0}: Error finding container 1fd4f496bc1b50b10698a35912c5d68fdccd3d5ccbd8cd84ad6f91600f33c182: Status 404 returned error can't find the container with id 1fd4f496bc1b50b10698a35912c5d68fdccd3d5ccbd8cd84ad6f91600f33c182
Sep 30 13:37:25 crc kubenswrapper[4783]: W0930 13:37:25.054449 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9eab506_e988_48ab_94d2_32ffd62adb75.slice/crio-fdf055cc628774233820b4f7e60bfdafb6895890a4aece9e4797217184513886 WatchSource:0}: Error finding container fdf055cc628774233820b4f7e60bfdafb6895890a4aece9e4797217184513886: Status 404 returned error can't find the container with id fdf055cc628774233820b4f7e60bfdafb6895890a4aece9e4797217184513886
Sep 30 13:37:25 crc kubenswrapper[4783]: W0930 13:37:25.058423 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9b9dda7_ec4e_4773_b6a1_9f5636370dc5.slice/crio-cc4137d8ad04d107de9919ef2acfc847727f3582717b3bf4b8c2e006c2c5ab58 WatchSource:0}: Error finding container cc4137d8ad04d107de9919ef2acfc847727f3582717b3bf4b8c2e006c2c5ab58: Status 404 returned error can't find the container with id cc4137d8ad04d107de9919ef2acfc847727f3582717b3bf4b8c2e006c2c5ab58
Sep 30 13:37:25 crc kubenswrapper[4783]: W0930 13:37:25.061573 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64947936_ca6e_477f_963c_d9c3413c408a.slice/crio-4dff3373f5f7a43d468756c51e706c068fdc21f44e9ac3bfeb6adb94b32a3a80 WatchSource:0}: Error finding container 4dff3373f5f7a43d468756c51e706c068fdc21f44e9ac3bfeb6adb94b32a3a80: Status 404 returned error can't find the container with id 4dff3373f5f7a43d468756c51e706c068fdc21f44e9ac3bfeb6adb94b32a3a80
Sep 30 13:37:25 crc kubenswrapper[4783]: W0930 13:37:25.064943 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8b9a468b_4d63_4019_85aa_970d80f8ed12.slice/crio-bd84457b671da098e77b1f37dce774e36380ad5dec2a8aef1bd38fd8cd25ab2c WatchSource:0}: Error finding container bd84457b671da098e77b1f37dce774e36380ad5dec2a8aef1bd38fd8cd25ab2c: Status 404 returned error can't find the container with id bd84457b671da098e77b1f37dce774e36380ad5dec2a8aef1bd38fd8cd25ab2c
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.108709 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:25 crc kubenswrapper[4783]: E0930 13:37:25.108995 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:25.608981204 +0000 UTC m=+145.540447511 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.210928 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:25 crc kubenswrapper[4783]: E0930 13:37:25.211770 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:25.711683932 +0000 UTC m=+145.643150239 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.217808 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:25 crc kubenswrapper[4783]: E0930 13:37:25.218502 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:25.718470018 +0000 UTC m=+145.649936325 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.309730 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd"]
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.321216 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:25 crc kubenswrapper[4783]: E0930 13:37:25.321345 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:25.821329461 +0000 UTC m=+145.752795768 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.321532 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:25 crc kubenswrapper[4783]: E0930 13:37:25.321796 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:25.821789786 +0000 UTC m=+145.753256083 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.422880 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:25 crc kubenswrapper[4783]: E0930 13:37:25.423377 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:25.923359889 +0000 UTC m=+145.854826196 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:25 crc kubenswrapper[4783]: W0930 13:37:25.453298 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9e2cbddf_22f6_4114_ba55_c45128962978.slice/crio-517103eb9579346b3b97de67a23cb65d2f7aeaf5d8fac91a044789879cf3a426 WatchSource:0}: Error finding container 517103eb9579346b3b97de67a23cb65d2f7aeaf5d8fac91a044789879cf3a426: Status 404 returned error can't find the container with id 517103eb9579346b3b97de67a23cb65d2f7aeaf5d8fac91a044789879cf3a426
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.524983 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:25 crc kubenswrapper[4783]: E0930 13:37:25.525505 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:26.025487969 +0000 UTC m=+145.956954276 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.568071 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-lbt9h" podStartSLOduration=120.56804639 podStartE2EDuration="2m0.56804639s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:25.563747354 +0000 UTC m=+145.495213671" watchObservedRunningTime="2025-09-30 13:37:25.56804639 +0000 UTC m=+145.499512707"
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.588687 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46"]
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.593445 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6"]
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.600019 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh"]
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.626801 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:25 crc kubenswrapper[4783]: E0930 13:37:25.627000 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:26.126972809 +0000 UTC m=+146.058439116 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.627242 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:25 crc kubenswrapper[4783]: E0930 13:37:25.627538 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:26.127523667 +0000 UTC m=+146.058989974 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.636480 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" event={"ID":"6e505909-2b68-4acc-ad7d-06667458728e","Type":"ContainerStarted","Data":"2b51bf7ffc22605095a7142d52fdbb1b2a84387077dd54a9fc165cbd6f43c6de"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.640588 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" event={"ID":"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5","Type":"ContainerStarted","Data":"cc4137d8ad04d107de9919ef2acfc847727f3582717b3bf4b8c2e006c2c5ab58"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.641064 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mwplc" podStartSLOduration=120.641051156 podStartE2EDuration="2m0.641051156s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:25.640279141 +0000 UTC m=+145.571745458" watchObservedRunningTime="2025-09-30 13:37:25.641051156 +0000 UTC m=+145.572517473"
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.641907 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl" event={"ID":"8b9a468b-4d63-4019-85aa-970d80f8ed12","Type":"ContainerStarted","Data":"bd84457b671da098e77b1f37dce774e36380ad5dec2a8aef1bd38fd8cd25ab2c"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.643113 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" event={"ID":"6af2276a-3ae6-4c19-b75c-935d765d3890","Type":"ContainerStarted","Data":"af7a544648adb8b681260694207537bd4b9123dbb3afae22df9becc0d3095939"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.649793 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-csqxv" event={"ID":"b396639f-2c8b-462d-931f-c2d52fbb9d17","Type":"ContainerStarted","Data":"4329f5e48467d7d06c0ed921bdaba14053e06387ad7db95d5e42acb9ceb3678d"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.650727 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf" event={"ID":"3f9c1072-438c-42a1-b380-8e1aefb0116c","Type":"ContainerStarted","Data":"2d1afd438e443a74f55a34f7044dddeb03747566ac1e931f2609c497046717aa"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.652322 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" event={"ID":"a2ac21c3-4001-4c91-851f-bcde41192c27","Type":"ContainerStarted","Data":"c9d6c76a2f934bcbbafba9783c9b56725ddc6e6e139f6212ecc534e69e569c0c"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.653062 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv" event={"ID":"a92be6ca-c361-456c-9ed3-1f35aa4652f5","Type":"ContainerStarted","Data":"bb49c06335bfef48dbc96ad76cf451e482aee51096c00c9d66fdd9965e426d75"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.653847 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6j7xf" event={"ID":"b9eab506-e988-48ab-94d2-32ffd62adb75","Type":"ContainerStarted","Data":"fdf055cc628774233820b4f7e60bfdafb6895890a4aece9e4797217184513886"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.655868 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" event={"ID":"96c43e6b-e8b0-4282-8882-cafa0a59c2d1","Type":"ContainerStarted","Data":"64c9114d1d91a891a4c65469fbc6f7de72984314754ff2863618cb3a4194b8f6"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.657209 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd" event={"ID":"9e2cbddf-22f6-4114-ba55-c45128962978","Type":"ContainerStarted","Data":"517103eb9579346b3b97de67a23cb65d2f7aeaf5d8fac91a044789879cf3a426"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.659663 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-946mg" event={"ID":"2dd54ba0-375c-4230-9d39-3ef77c055e7f","Type":"ContainerStarted","Data":"76d8c5f6b9528b2523004720cf31fcd5cfef66fec97a60d1dbf3d4877b830bfa"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.660854 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" event={"ID":"d452858f-ce60-4cf1-83ec-ac72613ca649","Type":"ContainerStarted","Data":"0e634fc110d9abaf5e599fce893748cb71a55e363885fd61496f1bc183c1bf12"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.661029 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4"
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.662713 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" event={"ID":"ba290d22-ab6f-413c-9dfc-3285b83488ed","Type":"ContainerStarted","Data":"51d3e5af2a3ba403a7c12c698f823f2b14e7a99f114a873afdb43b3d4b4e5d2e"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.664279 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" event={"ID":"38a97782-7b47-42b2-aea4-6e310de9d476","Type":"ContainerStarted","Data":"734cd6e800483ada25d173c53538baec4573a455db63292ad0153c727a1ec1c9"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.665544 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qmsj2" event={"ID":"9c2b300d-113f-4ddc-871f-433b31336f7b","Type":"ContainerStarted","Data":"abbc3f3e9b601058b15bf4678473810f9a1a45258681a55301910ef4bb11af70"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.666774 4783 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-7z2r4 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" start-of-body=
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.666810 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" podUID="d452858f-ce60-4cf1-83ec-ac72613ca649" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused"
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.667267 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6l7fj" event={"ID":"5245907c-d3d1-4303-b5cc-e99345fa8f55","Type":"ContainerStarted","Data":"3f0ec29c1ee29d6437f7c7a2dce7245e7b77f91abf5c77076974eb3af686edf3"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.668374 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" event={"ID":"dbd38476-9515-4ef6-b260-de6a854da0f4","Type":"ContainerStarted","Data":"1ce6c24378dd75ad080afbcaed852a1b1c132b9473ee03e3ca718c2371402446"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.669775 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" event={"ID":"1f2a2e69-6d09-4819-ac74-66a5806697e0","Type":"ContainerStarted","Data":"1fd4f496bc1b50b10698a35912c5d68fdccd3d5ccbd8cd84ad6f91600f33c182"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.679288 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z" event={"ID":"64947936-ca6e-477f-963c-d9c3413c408a","Type":"ContainerStarted","Data":"4dff3373f5f7a43d468756c51e706c068fdc21f44e9ac3bfeb6adb94b32a3a80"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.679818 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5"]
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.680035 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5fpgz" event={"ID":"01b72b4c-9858-4ddf-9436-557dbb523e7d","Type":"ContainerStarted","Data":"4ef39c12618b05e57ec883d42535b8a4d4b80fb5a48951418955869b9f8a393f"}
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.681947 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6"]
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.727899 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:25 crc kubenswrapper[4783]: E0930 13:37:25.728922 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:26.228897994 +0000 UTC m=+146.160364301 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.739747 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-4lcz8" podStartSLOduration=120.739730527 podStartE2EDuration="2m0.739730527s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:25.738578731 +0000 UTC m=+145.670045048" watchObservedRunningTime="2025-09-30 13:37:25.739730527 +0000 UTC m=+145.671196834"
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.785183 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" podStartSLOduration=120.785161969 podStartE2EDuration="2m0.785161969s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:25.782971079 +0000 UTC m=+145.714437386" watchObservedRunningTime="2025-09-30 13:37:25.785161969 +0000 UTC m=+145.716628286"
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.829086 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:25 crc kubenswrapper[4783]: E0930 13:37:25.829386 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:26.329374982 +0000 UTC m=+146.260841289 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:25 crc kubenswrapper[4783]: W0930 13:37:25.844136 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb8ece4e_276a_4a00_bc65_41977183f112.slice/crio-37711b060a90a971d3b7fd57934ad1a152ea8114d9c2c5cf99828d897383f442 WatchSource:0}: Error finding container 37711b060a90a971d3b7fd57934ad1a152ea8114d9c2c5cf99828d897383f442: Status 404 returned error can't find the container with id 37711b060a90a971d3b7fd57934ad1a152ea8114d9c2c5cf99828d897383f442
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.930098 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:25 crc kubenswrapper[4783]: E0930 13:37:25.930467 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:26.430440139 +0000 UTC m=+146.361906446 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:25 crc kubenswrapper[4783]: I0930 13:37:25.930845 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:25 crc kubenswrapper[4783]: E0930 13:37:25.931285 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:26.431277685 +0000 UTC m=+146.362743992 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.031785 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.032178 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:26.532162756 +0000 UTC m=+146.463629063 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.133249 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.133703 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:26.633687747 +0000 UTC m=+146.565154054 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.234414 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.234538 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:26.734519547 +0000 UTC m=+146.665985854 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.234792 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.235104 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:26.735092925 +0000 UTC m=+146.666559232 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.335606 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.335983 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:26.835950155 +0000 UTC m=+146.767416472 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.436762 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.437690 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:26.937665592 +0000 UTC m=+146.869131929 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.538088 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.538241 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.038205693 +0000 UTC m=+146.969672010 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.538781 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.539122 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.039112091 +0000 UTC m=+146.970578408 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.639373 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.639542 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.139521477 +0000 UTC m=+147.070987794 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.639627 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.639897 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.139889149 +0000 UTC m=+147.071355456 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.685412 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5" event={"ID":"7160bb17-05ab-419f-95a2-3a02e4f0770a","Type":"ContainerStarted","Data":"9deb7d2ba27d505e41fd20a7bf68f57b8d43ad472f4c43958947e57d2f927b6d"}
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.695000 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-njt7f" event={"ID":"06c6f17e-509e-47c9-a7fa-26cc13ed6012","Type":"ContainerStarted","Data":"83597d80a2285b30666621b355beac146b0b08523594c096fb79b4f4b837df46"}
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.696786 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz" event={"ID":"718955ee-bebc-4bab-8658-3e9d8a782b5c","Type":"ContainerStarted","Data":"5b42906ff8b08b3aa18129785571b47c61bd120de2bbb390c5d8859e52e473f6"}
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.698729 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-z67sw" event={"ID":"5e34af40-7563-4772-bd48-cc31a0354c25","Type":"ContainerStarted","Data":"ddc12ace4d979e36f9b5e0d952819e943109c4eb1895002fc12bee3544c56791"}
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.700602 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" event={"ID":"91c12832-2428-4e1c-b9de-18936239646c","Type":"ContainerStarted","Data":"55dd7f91d65ebeddfc3df48f01c1192eb92b05a8deccb6cf53cb2f9fd305b626"}
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.702290 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" event={"ID":"6db110f9-7517-4393-ad1a-b621ed8b64f6","Type":"ContainerStarted","Data":"53809ef6968936adca61c40548f83385ec039ad02da5777c709432f5a8a641f1"}
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.703737 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" event={"ID":"76bf7889-7f8d-4b56-a600-b57a329cb120","Type":"ContainerStarted","Data":"43c557b61a7e8c5bb03a2e943925d4bf9247600a49bef68630a000539e94606b"}
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.704914 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" event={"ID":"3039e69c-4d9b-448f-a82d-748101164cfd","Type":"ContainerStarted","Data":"b84612afcca18e65c93e27041c375e2883ba4e3e14e3ca5350009d9fb2b6cc17"}
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.706984 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-6shjd" event={"ID":"d5b11b65-a14d-4f79-9c43-fbb5e93882aa","Type":"ContainerStarted","Data":"6eb839245e6f7a15e1309e76c2a5c840eb8b36664d729d3f85b902ef86e5e8a1"}
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.708912 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x9v6j" event={"ID":"ca271c35-f2e4-4c56-a82b-4f47591904f1","Type":"ContainerStarted","Data":"3fb4128783c5358dba22678809f71a06b5ef48cb4306202e25f71305229b53d1"}
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.709709 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" event={"ID":"fb8ece4e-276a-4a00-bc65-41977183f112","Type":"ContainerStarted","Data":"37711b060a90a971d3b7fd57934ad1a152ea8114d9c2c5cf99828d897383f442"}
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.711299 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" event={"ID":"3bc346e5-7f91-4375-ac44-6bf5fa06f4fa","Type":"ContainerStarted","Data":"a7b4c3bd868fd68d03405a3d082f1215cd4ad74c47ffca1eb24aaec732000927"}
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.711864 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-946mg"
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.712533 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh"
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.713028 4783 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-7z2r4 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" start-of-body=
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.713070 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" podUID="d452858f-ce60-4cf1-83ec-ac72613ca649" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused"
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.715043 4783 patch_prober.go:28] interesting pod/console-operator-58897d9998-946mg container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body=
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.715072 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-946mg" podUID="2dd54ba0-375c-4230-9d39-3ef77c055e7f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused"
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.715895 4783 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-fvldh container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body=
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.715922 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" podUID="ba290d22-ab6f-413c-9dfc-3285b83488ed" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused"
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.740730 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.740864 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.240839772 +0000 UTC m=+147.172306079 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.741859 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.742303 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.242288318 +0000 UTC m=+147.173754625 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.747962 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" podStartSLOduration=121.747942128 podStartE2EDuration="2m1.747942128s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:26.747265616 +0000 UTC m=+146.678731963" watchObservedRunningTime="2025-09-30 13:37:26.747942128 +0000 UTC m=+146.679408435"
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.766929 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-946mg" podStartSLOduration=121.766905819 podStartE2EDuration="2m1.766905819s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:26.762128088 +0000 UTC m=+146.693594405" watchObservedRunningTime="2025-09-30 13:37:26.766905819 +0000 UTC m=+146.698372136"
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.845047 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.845288 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.345254045 +0000 UTC m=+147.276720362 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.846434 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.847190 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.347173816 +0000 UTC m=+147.278640133 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.947357 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.947537 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.447516969 +0000 UTC m=+147.378983276 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:26 crc kubenswrapper[4783]: I0930 13:37:26.947667 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:26 crc kubenswrapper[4783]: E0930 13:37:26.947918 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.447909392 +0000 UTC m=+147.379375699 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.048671 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:27 crc kubenswrapper[4783]: E0930 13:37:27.048909 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.548882536 +0000 UTC m=+147.480348843 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.150360 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:27 crc kubenswrapper[4783]: E0930 13:37:27.150659 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.650644125 +0000 UTC m=+147.582110432 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.251795 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:27 crc kubenswrapper[4783]: E0930 13:37:27.251932 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.751913568 +0000 UTC m=+147.683379875 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.252055 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:27 crc kubenswrapper[4783]: E0930 13:37:27.252692 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.752679402 +0000 UTC m=+147.684145709 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.353944 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:27 crc kubenswrapper[4783]: E0930 13:37:27.354174 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.854125151 +0000 UTC m=+147.785591468 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.354400 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:27 crc kubenswrapper[4783]: E0930 13:37:27.354809 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.854789032 +0000 UTC m=+147.786255349 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.458313 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:27 crc kubenswrapper[4783]: E0930 13:37:27.458771 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:27.958755051 +0000 UTC m=+147.890221358 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.559888 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:27 crc kubenswrapper[4783]: E0930 13:37:27.560393 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:28.060376465 +0000 UTC m=+147.991842782 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.661199 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:27 crc kubenswrapper[4783]: E0930 13:37:27.661355 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:28.161330008 +0000 UTC m=+148.092796305 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:27 crc kubenswrapper[4783]: E0930 13:37:27.663464 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:28.163432606 +0000 UTC m=+148.094898953 (durationBeforeRetry 500ms). 
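[Annotation: the four messages above repeat essentially verbatim for the rest of this excerpt. The volume can neither be unmounted for the terminated pod 8f668bae-612b-4b75-9490-919e737c6a3b nor mounted for image-registry-697d97f7c8-xwvfx, because the kubevirt.io.hostpath-provisioner CSI driver has not registered with the kubelet, so each attempt is requeued with a 500ms backoff. A minimal sketch of how one might check what is actually registered, assuming cluster access and the "kubernetes" Python client; none of these calls appear in the captured log itself.]

# Sketch only: inspect CSI driver registration (pip install kubernetes).
from kubernetes import client, config

config.load_kube_config()  # or config.load_incluster_config() inside a pod
storage = client.StorageV1Api()

# Cluster-scoped CSIDriver objects known to the API server.
for drv in storage.list_csi_driver().items:
    print("CSIDriver:", drv.metadata.name)

# CSINode reflects which drivers each node's kubelet has registered -- the
# "list of registered CSI drivers" that the TearDown/MountDevice errors cite.
for csinode in storage.list_csi_node().items:
    drivers = [d.name for d in (csinode.spec.drivers or [])]
    print(csinode.metadata.name, "->", drivers or "no drivers registered")

[If kubevirt.io.hostpath-provisioner is absent from the node's CSINode entry, the retries below will keep failing until the driver's node plugin re-registers with the kubelet.]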
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.662738 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.717846 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl" event={"ID":"8b9a468b-4d63-4019-85aa-970d80f8ed12","Type":"ContainerStarted","Data":"c87f264d1cda3ac8bdf8c40184af481b99e79f938b50ea13140d9ae8ace8e818"}
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.719822 4783 generic.go:334] "Generic (PLEG): container finished" podID="dbd38476-9515-4ef6-b260-de6a854da0f4" containerID="1ce6c24378dd75ad080afbcaed852a1b1c132b9473ee03e3ca718c2371402446" exitCode=0
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.719938 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" event={"ID":"dbd38476-9515-4ef6-b260-de6a854da0f4","Type":"ContainerDied","Data":"1ce6c24378dd75ad080afbcaed852a1b1c132b9473ee03e3ca718c2371402446"}
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.721476 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z" event={"ID":"64947936-ca6e-477f-963c-d9c3413c408a","Type":"ContainerStarted","Data":"d878442e124b80bf5c76a617058d0ae76f40b5cc8a40fecb6bcfc7fd6436282c"}
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.723939 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6l7fj" event={"ID":"5245907c-d3d1-4303-b5cc-e99345fa8f55","Type":"ContainerStarted","Data":"5159a12e8b55046e89132da775f955cd348e3305346a52567ba3fcbae9212e96"}
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.725711 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf" event={"ID":"4957a6e1-74ad-4d72-99c0-a11c24629f13","Type":"ContainerStarted","Data":"1714014cfe2bc42fdc517ec266a4b6118032f7de893ae4075c05ca1b0bdf2961"}
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.729573 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" event={"ID":"977de9a0-2247-494e-b9b2-3f7296950f1f","Type":"ContainerStarted","Data":"9276cfe97aba982ffeb95d2590922ff0bece115037003279fc3f32c2a87b2ea6"}
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.731161 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv" event={"ID":"a92be6ca-c361-456c-9ed3-1f35aa4652f5","Type":"ContainerStarted","Data":"faa8feab90874af807815f76424ec7a8822d17ba49e40775ff2b8c91c4d8cde0"}
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.732775 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" event={"ID":"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5","Type":"ContainerStarted","Data":"24495c0b3068a560c24b96bc0f1a2da03347af9a5095743edde53f90d6ae6fb8"}
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.734284 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qmsj2" event={"ID":"9c2b300d-113f-4ddc-871f-433b31336f7b","Type":"ContainerStarted","Data":"e415c3950339d124341782e134ceebaaf0eb7bc2fb68a10e15c12f311365347e"}
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.735603 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-csqxv" event={"ID":"b396639f-2c8b-462d-931f-c2d52fbb9d17","Type":"ContainerStarted","Data":"bb54457e1475bbfc0515e90c76dbfd8e9207edf56163ee38a95a502dfff3df3e"}
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.736914 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf" event={"ID":"3f9c1072-438c-42a1-b380-8e1aefb0116c","Type":"ContainerStarted","Data":"7f44338b8471fa2b4ef20c5a78108a89a2c7750a2e0d3d067f71656132c40684"}
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.738660 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k" event={"ID":"af6b0081-12d7-4b05-b5ff-f2b9d20efa95","Type":"ContainerStarted","Data":"dba620bd98868305210eb1a30d53b4fb481ff4f83c6ac150903f74633862c88d"}
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.740032 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd" event={"ID":"9e2cbddf-22f6-4114-ba55-c45128962978","Type":"ContainerStarted","Data":"60cde46eecb4fbd4ae427ba3d8e7c13a78469c8fa9ba3b5d091f1dcaab714778"}
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.743622 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-k8cdm" event={"ID":"af39d542-9d45-4afb-8b3c-2d50e9fdfb90","Type":"ContainerStarted","Data":"f007cbd3775294332dd8a9ed839e296dad82595230f1bff221726280a2c22446"}
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.744116 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9"
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.744592 4783 patch_prober.go:28] interesting pod/console-operator-58897d9998-946mg container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body=
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.744654 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-946mg" podUID="2dd54ba0-375c-4230-9d39-3ef77c055e7f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused"
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.745695 4783 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-fvldh container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body=
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.745740 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" podUID="ba290d22-ab6f-413c-9dfc-3285b83488ed" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused"
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.748351 4783 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-vz2w9 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body=
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.748396 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" podUID="96c43e6b-e8b0-4282-8882-cafa0a59c2d1" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused"
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.760809 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-jq62m" podStartSLOduration=122.760786904 podStartE2EDuration="2m2.760786904s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:27.76004336 +0000 UTC m=+147.691509677" watchObservedRunningTime="2025-09-30 13:37:27.760786904 +0000 UTC m=+147.692253231"
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.765600 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:27 crc kubenswrapper[4783]: E0930 13:37:27.768190 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:28.268169538 +0000 UTC m=+148.199635855 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.784415 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" podStartSLOduration=122.784396243 podStartE2EDuration="2m2.784396243s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:27.780856331 +0000 UTC m=+147.712322668" watchObservedRunningTime="2025-09-30 13:37:27.784396243 +0000 UTC m=+147.715862560"
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.800429 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sllkz" podStartSLOduration=122.80041387200001 podStartE2EDuration="2m2.800413872s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:27.799856844 +0000 UTC m=+147.731323161" watchObservedRunningTime="2025-09-30 13:37:27.800413872 +0000 UTC m=+147.731880189"
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.851369 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" podStartSLOduration=122.851349378 podStartE2EDuration="2m2.851349378s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:27.849519269 +0000 UTC m=+147.780985586" watchObservedRunningTime="2025-09-30 13:37:27.851349378 +0000 UTC m=+147.782815695"
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.853349 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" podStartSLOduration=122.853339001 podStartE2EDuration="2m2.853339001s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:27.82494822 +0000 UTC m=+147.756414537" watchObservedRunningTime="2025-09-30 13:37:27.853339001 +0000 UTC m=+147.784805318"
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.867215 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" podStartSLOduration=122.86719185 podStartE2EDuration="2m2.86719185s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:27.866022703 +0000 UTC m=+147.797489020" watchObservedRunningTime="2025-09-30 13:37:27.86719185 +0000 UTC m=+147.798658177"
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.868285 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:27 crc kubenswrapper[4783]: E0930 13:37:27.868735 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:28.368721329 +0000 UTC m=+148.300187646 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.881305 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-z67sw" podStartSLOduration=122.881290698 podStartE2EDuration="2m2.881290698s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:27.878589152 +0000 UTC m=+147.810055479" watchObservedRunningTime="2025-09-30 13:37:27.881290698 +0000 UTC m=+147.812757015"
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.900608 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-6shjd" podStartSLOduration=122.90058714 podStartE2EDuration="2m2.90058714s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:27.899753033 +0000 UTC m=+147.831219340" watchObservedRunningTime="2025-09-30 13:37:27.90058714 +0000 UTC m=+147.832053457"
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.930099 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-nlgqp" podStartSLOduration=122.930064135 podStartE2EDuration="2m2.930064135s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:27.928109863 +0000 UTC m=+147.859576180" watchObservedRunningTime="2025-09-30 13:37:27.930064135 +0000 UTC m=+147.861530442"
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.969708 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:27 crc kubenswrapper[4783]: E0930 13:37:27.969877 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:28.469862078 +0000 UTC m=+148.401328375 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:27 crc kubenswrapper[4783]: I0930 13:37:27.969912 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:27 crc kubenswrapper[4783]: E0930 13:37:27.970328 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:28.470317812 +0000 UTC m=+148.401784119 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.070810 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:28 crc kubenswrapper[4783]: E0930 13:37:28.070955 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:28.570927235 +0000 UTC m=+148.502393582 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.071144 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:28 crc kubenswrapper[4783]: E0930 13:37:28.071631 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:28.571613287 +0000 UTC m=+148.503079634 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.172537 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:28 crc kubenswrapper[4783]: E0930 13:37:28.172848 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:28.672809898 +0000 UTC m=+148.604276215 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.172983 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:28 crc kubenswrapper[4783]: E0930 13:37:28.173526 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:28.673505359 +0000 UTC m=+148.604971686 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.274681 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:28 crc kubenswrapper[4783]: E0930 13:37:28.275172 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:28.775139454 +0000 UTC m=+148.706605781 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.376189 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:28 crc kubenswrapper[4783]: E0930 13:37:28.376798 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:28.876768209 +0000 UTC m=+148.808234526 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.482416 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:28 crc kubenswrapper[4783]: E0930 13:37:28.483397 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:28.983378872 +0000 UTC m=+148.914845169 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.568713 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-6shjd"
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.569446 4783 patch_prober.go:28] interesting pod/router-default-5444994796-6shjd container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.569504 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6shjd" podUID="d5b11b65-a14d-4f79-9c43-fbb5e93882aa" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.584688 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:28 crc kubenswrapper[4783]: E0930 13:37:28.585196 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.085180212 +0000 UTC m=+149.016646519 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.717896 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:28 crc kubenswrapper[4783]: E0930 13:37:28.718171 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.21812829 +0000 UTC m=+149.149594637 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.718469 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:28 crc kubenswrapper[4783]: E0930 13:37:28.718894 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.218865244 +0000 UTC m=+149.150331571 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.757716 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-njt7f" event={"ID":"06c6f17e-509e-47c9-a7fa-26cc13ed6012","Type":"ContainerStarted","Data":"596a6bbff97da457a6e2a336f0e3d817567f0c883c14ade520f0ee2a44d4ea19"}
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.782578 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" event={"ID":"9cd8cfc4-ef55-44b7-8fc4-9c268d33d2a7","Type":"ContainerStarted","Data":"cc965ebdababc49d28a464e92ce7fdb21b50e4c679a9aad4411cff70c58edafb"}
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.784279 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" event={"ID":"76bf7889-7f8d-4b56-a600-b57a329cb120","Type":"ContainerStarted","Data":"ca52f5eab72d424a6f00626f758388f573f17b5e1b09db21ad28a0e060190fcf"}
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.785080 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6"
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.787696 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" event={"ID":"3bc346e5-7f91-4375-ac44-6bf5fa06f4fa","Type":"ContainerStarted","Data":"b479d3913b2f3bdb2c2fffb3819f5595a2dcaafddc6e1685b8c341101ee8ad9e"}
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.788392 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh"
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.792942 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" event={"ID":"3039e69c-4d9b-448f-a82d-748101164cfd","Type":"ContainerStarted","Data":"cbdb96118241d694c4bc06709b2851409573150a034cbd711d8d45295a07aa99"}
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.793366 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6"
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.796529 4783 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-h7ww6 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body=
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.796580 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" podUID="76bf7889-7f8d-4b56-a600-b57a329cb120" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused"
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.799325 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5" event={"ID":"7160bb17-05ab-419f-95a2-3a02e4f0770a","Type":"ContainerStarted","Data":"1ad6372d19736aae52241cf34e8de73d788419d59814ee4948631b9055ed184e"}
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.802333 4783 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-fglf6 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" start-of-body=
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.802374 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" podUID="3039e69c-4d9b-448f-a82d-748101164cfd" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused"
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.802438 4783 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-8p9wh container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body=
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.802452 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" podUID="3bc346e5-7f91-4375-ac44-6bf5fa06f4fa" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused"
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.802558 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b6c2n" podStartSLOduration=123.802550119 podStartE2EDuration="2m3.802550119s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:28.802180407 +0000 UTC m=+148.733646724" watchObservedRunningTime="2025-09-30 13:37:28.802550119 +0000 UTC m=+148.734016416"
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.804416 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" event={"ID":"38a97782-7b47-42b2-aea4-6e310de9d476","Type":"ContainerStarted","Data":"6488e4742b6c812549089e59902e4318e0f283853b5d77faf33043d382181878"}
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.808300 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" event={"ID":"a2ac21c3-4001-4c91-851f-bcde41192c27","Type":"ContainerStarted","Data":"e40ea5c76e83cd731c14b95fadd38a2188f83f91b6af295a0a5061ca2773a9a2"}
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.814560 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6j7xf" event={"ID":"b9eab506-e988-48ab-94d2-32ffd62adb75","Type":"ContainerStarted","Data":"f5e9e5b0c13669320a93924238f2005248f004cadc717bc21293f1477f7d71cb"}
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.816600 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" event={"ID":"fb8ece4e-276a-4a00-bc65-41977183f112","Type":"ContainerStarted","Data":"930e3b63c25e2b4f3d6dc2497b1ca344fcde5e80eff97af9d1e57111a444de3c"}
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.821357 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5fpgz" event={"ID":"01b72b4c-9858-4ddf-9436-557dbb523e7d","Type":"ContainerStarted","Data":"75aca8ae2146538690d962502df572f510ee2ad20381c6889194d0a681a61445"}
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.821907 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:28 crc kubenswrapper[4783]: E0930 13:37:28.822204 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.322188952 +0000 UTC m=+149.253655259 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.823110 4783 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-vz2w9 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body=
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.823144 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" podUID="96c43e6b-e8b0-4282-8882-cafa0a59c2d1" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused"
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.866511 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2ppb5" podStartSLOduration=123.866493488 podStartE2EDuration="2m3.866493488s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:28.833564443 +0000 UTC m=+148.765030750" watchObservedRunningTime="2025-09-30 13:37:28.866493488 +0000 UTC m=+148.797959795"
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.899069 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" podStartSLOduration=123.899053771 podStartE2EDuration="2m3.899053771s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:28.872063415 +0000 UTC m=+148.803529712" watchObservedRunningTime="2025-09-30 13:37:28.899053771 +0000 UTC m=+148.830520078"
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.920445 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" podStartSLOduration=123.92043037 podStartE2EDuration="2m3.92043037s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:28.89934742 +0000 UTC m=+148.830813737" watchObservedRunningTime="2025-09-30 13:37:28.92043037 +0000 UTC m=+148.851896677"
Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.922830 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:28 crc kubenswrapper[4783]: E0930 13:37:28.927900 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.427881906 +0000 UTC m=+149.359348213 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.427881906 +0000 UTC m=+149.359348213 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.944998 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" podStartSLOduration=123.944982768 podStartE2EDuration="2m3.944982768s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:28.922096912 +0000 UTC m=+148.853563219" watchObservedRunningTime="2025-09-30 13:37:28.944982768 +0000 UTC m=+148.876449075" Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.965718 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jj65k" podStartSLOduration=123.965701815 podStartE2EDuration="2m3.965701815s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:28.944770302 +0000 UTC m=+148.876236609" watchObservedRunningTime="2025-09-30 13:37:28.965701815 +0000 UTC m=+148.897168122" Sep 30 13:37:28 crc kubenswrapper[4783]: I0930 13:37:28.984566 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wf8bd" podStartSLOduration=123.984548214 podStartE2EDuration="2m3.984548214s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:28.967596796 +0000 UTC m=+148.899063103" watchObservedRunningTime="2025-09-30 13:37:28.984548214 +0000 UTC m=+148.916014521" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.019722 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5fpgz" podStartSLOduration=124.019706749 podStartE2EDuration="2m4.019706749s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:28.985345458 +0000 UTC m=+148.916811765" watchObservedRunningTime="2025-09-30 13:37:29.019706749 +0000 UTC m=+148.951173056" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.020306 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-458dz" podStartSLOduration=124.020302088 podStartE2EDuration="2m4.020302088s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" 
Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.024639 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.025047 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.525034438 +0000 UTC m=+149.456500745 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.057405 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-csqxv" podStartSLOduration=9.057385895 podStartE2EDuration="9.057385895s" podCreationTimestamp="2025-09-30 13:37:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:29.055379151 +0000 UTC m=+148.986845458" watchObservedRunningTime="2025-09-30 13:37:29.057385895 +0000 UTC m=+148.988852212"
Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.070957 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-npcpf" podStartSLOduration=124.070942455 podStartE2EDuration="2m4.070942455s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:29.069177708 +0000 UTC m=+149.000644025" watchObservedRunningTime="2025-09-30 13:37:29.070942455 +0000 UTC m=+149.002408762"
Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.127500 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mls7z" podStartSLOduration=124.127479469 podStartE2EDuration="2m4.127479469s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:29.124165834 +0000 UTC m=+149.055632151" watchObservedRunningTime="2025-09-30 13:37:29.127479469 +0000 UTC m=+149.058945776"
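The pod_startup_latency_tracker lines above are pure bookkeeping: podStartSLOduration is the watch-observed running time minus the pod's creation timestamp, and podStartE2EDuration is the same value rendered as a duration string (the zeroed firstStartedPulling/lastFinishedPulling timestamps mean no image pull was counted). A minimal sketch of the arithmetic, using the kube-apiserver-operator entry above:

// Sketch of the arithmetic behind the tracker lines; an illustration, not
// kubelet's pod_startup_latency_tracker implementation. Timestamps are taken
// from the kube-apiserver-operator record in this log.
package main

import (
	"fmt"
	"time"
)

func main() {
	created, _ := time.Parse(time.RFC3339, "2025-09-30T13:35:25Z")
	watchObserved, _ := time.Parse(time.RFC3339Nano, "2025-09-30T13:37:29.127479469Z")
	// podStartSLOduration = watchObservedRunningTime - podCreationTimestamp.
	fmt.Println(watchObserved.Sub(created)) // 2m4.127479469s, i.e. podStartSLOduration=124.127479469
}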
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kqqxv" podStartSLOduration=124.127656435 podStartE2EDuration="2m4.127656435s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:29.090515386 +0000 UTC m=+149.021981703" watchObservedRunningTime="2025-09-30 13:37:29.127656435 +0000 UTC m=+149.059122742" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.128438 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.128775 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.628761039 +0000 UTC m=+149.560227346 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.211072 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-x9v6j" podStartSLOduration=124.211056171 podStartE2EDuration="2m4.211056171s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:29.209498141 +0000 UTC m=+149.140964448" watchObservedRunningTime="2025-09-30 13:37:29.211056171 +0000 UTC m=+149.142522478" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.230917 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.231229 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.73120233 +0000 UTC m=+149.662668637 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.248276 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-6l7fj" podStartSLOduration=9.24820266 podStartE2EDuration="9.24820266s" podCreationTimestamp="2025-09-30 13:37:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:29.246911538 +0000 UTC m=+149.178377845" watchObservedRunningTime="2025-09-30 13:37:29.24820266 +0000 UTC m=+149.179668977" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.287674 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" podStartSLOduration=124.287661211 podStartE2EDuration="2m4.287661211s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:29.285627886 +0000 UTC m=+149.217094193" watchObservedRunningTime="2025-09-30 13:37:29.287661211 +0000 UTC m=+149.219127518" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.331943 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.332344 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.832328319 +0000 UTC m=+149.763794626 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.433711 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.433906 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.93387607 +0000 UTC m=+149.865342387 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.434031 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.434341 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:29.934325435 +0000 UTC m=+149.865791742 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.535238 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.535462 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:30.035430373 +0000 UTC m=+149.966896690 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.535528 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.535903 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:30.035891018 +0000 UTC m=+149.967357405 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.568539 4783 patch_prober.go:28] interesting pod/router-default-5444994796-6shjd container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.568603 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6shjd" podUID="d5b11b65-a14d-4f79-9c43-fbb5e93882aa" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.636659 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.636868 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:30.13682531 +0000 UTC m=+150.068291617 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.637036 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.637340 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:30.137328216 +0000 UTC m=+150.068794523 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.737989 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.738133 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:30.238115404 +0000 UTC m=+150.169581711 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.738244 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.738330 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.738380 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.738656 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:30.23864796 +0000 UTC m=+150.170114267 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.739323 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.750399 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.822986 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6j7xf" event={"ID":"b9eab506-e988-48ab-94d2-32ffd62adb75","Type":"ContainerStarted","Data":"1e72c59540d65ee91655450a2eed552df9fdb454a920cd8e6434a0ace84a9492"} Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.823175 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-6j7xf" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.825053 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" event={"ID":"dbd38476-9515-4ef6-b260-de6a854da0f4","Type":"ContainerStarted","Data":"00911524c8b3a47affb51b39ecac76ea65ab38babda9b3a7d9e70677371eee2b"} Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.825198 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.826924 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" event={"ID":"fb8ece4e-276a-4a00-bc65-41977183f112","Type":"ContainerStarted","Data":"1d44520bb654466e3ddd0edb3ff3067f61312ac10800b50b96a99671b847a4d9"} Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.828676 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" event={"ID":"d9b9dda7-ec4e-4773-b6a1-9f5636370dc5","Type":"ContainerStarted","Data":"58f12585486de5cb8dd10313dffcaf94e25b7732755191271af6c6dbc5effb3f"} Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.830277 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-k8cdm" event={"ID":"af39d542-9d45-4afb-8b3c-2d50e9fdfb90","Type":"ContainerStarted","Data":"ae6bef2bc5cedf7e42112b168ec8f3c0d8cdb364ab9a280f57c9acba40e13913"} Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.831809 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qmsj2" event={"ID":"9c2b300d-113f-4ddc-871f-433b31336f7b","Type":"ContainerStarted","Data":"6e0250e74fc7e7c63553f55d31c61255243c781a16533596f4572b4382249043"} Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.833470 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf" event={"ID":"3f9c1072-438c-42a1-b380-8e1aefb0116c","Type":"ContainerStarted","Data":"87af70faf011e1d4f979701b87e5ddd6023bf62cad0c4cfcb1f89738876b45c4"} Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.835779 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl" event={"ID":"8b9a468b-4d63-4019-85aa-970d80f8ed12","Type":"ContainerStarted","Data":"a7ea027eafe0f7a78f20eade51721e6b36a57a0806a669d3541088e0e2c2ad83"} Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.835808 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.837897 4783 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-h7ww6 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.837937 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" podUID="76bf7889-7f8d-4b56-a600-b57a329cb120" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.837950 4783 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-fglf6 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" start-of-body= Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.837975 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6" podUID="3039e69c-4d9b-448f-a82d-748101164cfd" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.838768 4783 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-8p9wh container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.838796 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" podUID="3bc346e5-7f91-4375-ac44-6bf5fa06f4fa" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.838861 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume 
Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.838861 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.838891 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-6j7xf" podStartSLOduration=9.838871861 podStartE2EDuration="9.838871861s" podCreationTimestamp="2025-09-30 13:37:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:29.838711456 +0000 UTC m=+149.770177763" watchObservedRunningTime="2025-09-30 13:37:29.838871861 +0000 UTC m=+149.770338178"
Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.838982 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:30.338965784 +0000 UTC m=+150.270432101 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.839088 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.839131 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.839213 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.839893 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:30.339880353 +0000 UTC m=+150.271346660 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.843419 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.847499 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.861773 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.892767 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.905104 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qmsj2" podStartSLOduration=124.905084752 podStartE2EDuration="2m4.905084752s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:29.872644283 +0000 UTC m=+149.804110590" watchObservedRunningTime="2025-09-30 13:37:29.905084752 +0000 UTC m=+149.836551069" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.905764 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kgm46" podStartSLOduration=124.905740843 podStartE2EDuration="2m4.905740843s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:29.904738261 +0000 UTC m=+149.836204568" watchObservedRunningTime="2025-09-30 13:37:29.905740843 +0000 UTC m=+149.837207160" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.907942 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.936061 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl" podStartSLOduration=124.936044314 podStartE2EDuration="2m4.936044314s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:29.932999287 +0000 UTC m=+149.864465614" watchObservedRunningTime="2025-09-30 13:37:29.936044314 +0000 UTC m=+149.867510621" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.940711 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:29 crc kubenswrapper[4783]: E0930 13:37:29.941810 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:30.441796267 +0000 UTC m=+150.373262574 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.976173 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-njt7f" podStartSLOduration=124.976160077 podStartE2EDuration="2m4.976160077s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:29.97437522 +0000 UTC m=+149.905841527" watchObservedRunningTime="2025-09-30 13:37:29.976160077 +0000 UTC m=+149.907626384" Sep 30 13:37:29 crc kubenswrapper[4783]: I0930 13:37:29.976819 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" podStartSLOduration=124.976811847 podStartE2EDuration="2m4.976811847s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:29.952253768 +0000 UTC m=+149.883720075" watchObservedRunningTime="2025-09-30 13:37:29.976811847 +0000 UTC m=+149.908278154" Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.003748 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-48n8x" podStartSLOduration=125.003732282 podStartE2EDuration="2m5.003732282s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:30.00020526 +0000 UTC m=+149.931671587" watchObservedRunningTime="2025-09-30 13:37:30.003732282 +0000 UTC m=+149.935198579" Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.023277 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2t6xf" podStartSLOduration=125.023242171 podStartE2EDuration="2m5.023242171s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:30.021995981 +0000 UTC m=+149.953462288" watchObservedRunningTime="2025-09-30 13:37:30.023242171 +0000 UTC m=+149.954708488" Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.042496 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:30 crc kubenswrapper[4783]: E0930 13:37:30.042897 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:30.542881484 +0000 UTC m=+150.474347791 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.045484 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-k8cdm" podStartSLOduration=125.045469856 podStartE2EDuration="2m5.045469856s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:30.04433155 +0000 UTC m=+149.975797857" watchObservedRunningTime="2025-09-30 13:37:30.045469856 +0000 UTC m=+149.976936163" Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.144697 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:30 crc kubenswrapper[4783]: E0930 13:37:30.145332 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:30.645317794 +0000 UTC m=+150.576784101 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.246817 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:30 crc kubenswrapper[4783]: E0930 13:37:30.247455 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:30.747442955 +0000 UTC m=+150.678909262 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.349843 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:30 crc kubenswrapper[4783]: E0930 13:37:30.350259 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:30.850238326 +0000 UTC m=+150.781704633 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:30 crc kubenswrapper[4783]: W0930 13:37:30.425699 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-5199d0ce204409eb1b1e9ac07e9ad5afbee512a8a8557f4589b36a2912cf70eb WatchSource:0}: Error finding container 5199d0ce204409eb1b1e9ac07e9ad5afbee512a8a8557f4589b36a2912cf70eb: Status 404 returned error can't find the container with id 5199d0ce204409eb1b1e9ac07e9ad5afbee512a8a8557f4589b36a2912cf70eb Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.452942 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:30 crc kubenswrapper[4783]: E0930 13:37:30.453550 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:30.953524433 +0000 UTC m=+150.884990740 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.554292 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:30 crc kubenswrapper[4783]: E0930 13:37:30.554518 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.054489767 +0000 UTC m=+150.985956074 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.570440 4783 patch_prober.go:28] interesting pod/router-default-5444994796-6shjd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 13:37:30 crc kubenswrapper[4783]: [-]has-synced failed: reason withheld Sep 30 13:37:30 crc kubenswrapper[4783]: [+]process-running ok Sep 30 13:37:30 crc kubenswrapper[4783]: healthz check failed Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.570498 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6shjd" podUID="d5b11b65-a14d-4f79-9c43-fbb5e93882aa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.655941 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:30 crc kubenswrapper[4783]: E0930 13:37:30.656298 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.156280777 +0000 UTC m=+151.087747084 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.756652 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:30 crc kubenswrapper[4783]: E0930 13:37:30.756850 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.256825507 +0000 UTC m=+151.188291814 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.756944 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:30 crc kubenswrapper[4783]: E0930 13:37:30.757212 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.257200059 +0000 UTC m=+151.188666366 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.846974 4783 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-h7ww6 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.847283 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6" podUID="76bf7889-7f8d-4b56-a600-b57a329cb120" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.847677 4783 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-8p9wh container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.847720 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh" podUID="3bc346e5-7f91-4375-ac44-6bf5fa06f4fa" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.849595 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"f7b2ee9ed1a5e58ddbaa8996183513f1e80e7a5c5ebd18381d5a5607d576853a"} Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.849631 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"7f504bb4f1358b8b1a274db92f4ea4c2e1a0ba8d4ac5e873c326dc26158b76a5"} Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.849645 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" event={"ID":"1f2a2e69-6d09-4819-ac74-66a5806697e0","Type":"ContainerStarted","Data":"1d9925b779440e1f4cb4cef28261f565fed9797975b05dc7e2101f4173f9269c"} Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.849660 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"4c869f4d281778f03bc4f94b291f58a99d1b532d1255f64a3f3fb94e18d6f9fe"} Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.849673 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"5199d0ce204409eb1b1e9ac07e9ad5afbee512a8a8557f4589b36a2912cf70eb"} Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.849685 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"c70b5f4aae7bc94118bfaa8e8b34931913eb2fcf2b7b75d216e482d4f92fb845"} Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.849706 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"7c0e3c8da4540aef937c1681b3e4eb464da6dcfbf5f14480f69e27a826826ceb"} Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.857696 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:30 crc kubenswrapper[4783]: E0930 13:37:30.857963 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.357948126 +0000 UTC m=+151.289414433 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.858077 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:30 crc kubenswrapper[4783]: E0930 13:37:30.858383 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.358374749 +0000 UTC m=+151.289841056 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.958602 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:30 crc kubenswrapper[4783]: E0930 13:37:30.958818 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.458801786 +0000 UTC m=+151.390268093 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:30 crc kubenswrapper[4783]: I0930 13:37:30.959149 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:30 crc kubenswrapper[4783]: E0930 13:37:30.960500 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.460491109 +0000 UTC m=+151.391957416 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.059838 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.060027 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.560002576 +0000 UTC m=+151.491468883 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.060349 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.060654 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.560647187 +0000 UTC m=+151.492113494 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.161204 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.161405 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.661374693 +0000 UTC m=+151.592841000 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.161474 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.161804 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.661790596 +0000 UTC m=+151.593256903 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.262824 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.263205 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.763181564 +0000 UTC m=+151.694647871 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.263341 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.263635 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.763623588 +0000 UTC m=+151.695089895 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.365025 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.365174 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.865152219 +0000 UTC m=+151.796618526 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.365406 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.365691 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.865680575 +0000 UTC m=+151.797146882 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.466512 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.466874 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:31.966849436 +0000 UTC m=+151.898315743 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.568471 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.568804 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.06878943 +0000 UTC m=+152.000255747 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.570605 4783 patch_prober.go:28] interesting pod/router-default-5444994796-6shjd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 13:37:31 crc kubenswrapper[4783]: [-]has-synced failed: reason withheld Sep 30 13:37:31 crc kubenswrapper[4783]: [+]process-running ok Sep 30 13:37:31 crc kubenswrapper[4783]: healthz check failed Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.570658 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6shjd" podUID="d5b11b65-a14d-4f79-9c43-fbb5e93882aa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.669473 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.669617 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.169596519 +0000 UTC m=+152.101062826 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.669683 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.670031 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.170021072 +0000 UTC m=+152.101487379 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.771272 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.771476 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.27145482 +0000 UTC m=+152.202921127 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.771525 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.771887 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.271876924 +0000 UTC m=+152.203343231 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.856572 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.872490 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.872697 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.372668152 +0000 UTC m=+152.304134459 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.872783 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.873088 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.373076145 +0000 UTC m=+152.304542452 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.973421 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.973641 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.473609555 +0000 UTC m=+152.405075862 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:31 crc kubenswrapper[4783]: I0930 13:37:31.973878 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:31 crc kubenswrapper[4783]: E0930 13:37:31.974207 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.474193664 +0000 UTC m=+152.405659971 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.048421 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.048495 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.069301 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.075498 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.075843 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.575634672 +0000 UTC m=+152.507100979 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.075941 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.076219 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.57621232 +0000 UTC m=+152.507678627 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.107740 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.107812 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.140174 4783 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-dnx6l container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.140228 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" podUID="dbd38476-9515-4ef6-b260-de6a854da0f4" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.140456 4783 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-dnx6l container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.140472 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l" podUID="dbd38476-9515-4ef6-b260-de6a854da0f4" containerName="openshift-config-operator" probeResult="failure" 
output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.176672 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.177426 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.677398331 +0000 UTC m=+152.608864638 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.277757 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.279303 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.779289744 +0000 UTC m=+152.710756051 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.378804 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.379359 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.879344669 +0000 UTC m=+152.810810976 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.480125 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.480459 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:32.980445086 +0000 UTC m=+152.911911393 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.581200 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.581339 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.081315457 +0000 UTC m=+153.012781764 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.581437 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.581716 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.081702379 +0000 UTC m=+153.013168686 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.583715 4783 patch_prober.go:28] interesting pod/router-default-5444994796-6shjd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 13:37:32 crc kubenswrapper[4783]: [-]has-synced failed: reason withheld Sep 30 13:37:32 crc kubenswrapper[4783]: [+]process-running ok Sep 30 13:37:32 crc kubenswrapper[4783]: healthz check failed Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.583750 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6shjd" podUID="d5b11b65-a14d-4f79-9c43-fbb5e93882aa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.683006 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.683057 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.183040685 +0000 UTC m=+153.114506992 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.683459 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.683785 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.183773218 +0000 UTC m=+153.115239525 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.784351 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.784533 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.284509274 +0000 UTC m=+153.215975581 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.784635 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.785042 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.285034081 +0000 UTC m=+153.216500388 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.873469 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6h4jw" Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.885798 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.885985 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.385961193 +0000 UTC m=+153.317427500 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.886275 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.886528 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.386521851 +0000 UTC m=+153.317988158 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:32 crc kubenswrapper[4783]: I0930 13:37:32.987441 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:32 crc kubenswrapper[4783]: E0930 13:37:32.988387 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.488354123 +0000 UTC m=+153.419820430 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.088643 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:33 crc kubenswrapper[4783]: E0930 13:37:33.089014 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.588982715 +0000 UTC m=+153.520449022 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.110142 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.116598 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-946mg" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.126465 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.139877 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.153395 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dskrd"] Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.154274 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:37:33 crc kubenswrapper[4783]: W0930 13:37:33.156305 4783 reflector.go:561] object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g": failed to list *v1.Secret: secrets "certified-operators-dockercfg-4rs5g" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object Sep 30 13:37:33 crc kubenswrapper[4783]: E0930 13:37:33.156338 4783 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"certified-operators-dockercfg-4rs5g\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"certified-operators-dockercfg-4rs5g\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.189376 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:33 crc kubenswrapper[4783]: E0930 13:37:33.190198 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.690146484 +0000 UTC m=+153.621612791 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.197103 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dskrd"] Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.292617 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.292884 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed506947-4aea-481f-92e6-be13c8bb206b-catalog-content\") pod \"certified-operators-dskrd\" (UID: \"ed506947-4aea-481f-92e6-be13c8bb206b\") " pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.292921 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd6r5\" (UniqueName: \"kubernetes.io/projected/ed506947-4aea-481f-92e6-be13c8bb206b-kube-api-access-sd6r5\") pod \"certified-operators-dskrd\" (UID: \"ed506947-4aea-481f-92e6-be13c8bb206b\") " pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:37:33 crc kubenswrapper[4783]: E0930 13:37:33.292948 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.792932846 +0000 UTC m=+153.724399153 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.293003 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed506947-4aea-481f-92e6-be13c8bb206b-utilities\") pod \"certified-operators-dskrd\" (UID: \"ed506947-4aea-481f-92e6-be13c8bb206b\") " pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.295949 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-z67sw" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.306362 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.306392 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.306424 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.306450 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.306998 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.307087 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.340866 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.340906 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 
13:37:33.343597 4783 patch_prober.go:28] interesting pod/console-f9d7485db-x9v6j container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.9:8443/health\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.343687 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-x9v6j" podUID="ca271c35-f2e4-4c56-a82b-4f47591904f1" containerName="console" probeResult="failure" output="Get \"https://10.217.0.9:8443/health\": dial tcp 10.217.0.9:8443: connect: connection refused" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.367393 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.368249 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.385201 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kqm5j"] Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.386289 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kqm5j" Sep 30 13:37:33 crc kubenswrapper[4783]: W0930 13:37:33.392120 4783 reflector.go:561] object-"openshift-marketplace"/"community-operators-dockercfg-dmngl": failed to list *v1.Secret: secrets "community-operators-dockercfg-dmngl" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object Sep 30 13:37:33 crc kubenswrapper[4783]: E0930 13:37:33.392177 4783 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"community-operators-dockercfg-dmngl\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"community-operators-dockercfg-dmngl\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.392429 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.392523 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.394796 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:33 crc kubenswrapper[4783]: E0930 13:37:33.394966 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.894937302 +0000 UTC m=+153.826403609 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.395137 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.395184 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed506947-4aea-481f-92e6-be13c8bb206b-catalog-content\") pod \"certified-operators-dskrd\" (UID: \"ed506947-4aea-481f-92e6-be13c8bb206b\") " pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.395265 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd6r5\" (UniqueName: \"kubernetes.io/projected/ed506947-4aea-481f-92e6-be13c8bb206b-kube-api-access-sd6r5\") pod \"certified-operators-dskrd\" (UID: \"ed506947-4aea-481f-92e6-be13c8bb206b\") " pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.395328 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed506947-4aea-481f-92e6-be13c8bb206b-utilities\") pod \"certified-operators-dskrd\" (UID: \"ed506947-4aea-481f-92e6-be13c8bb206b\") " pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:37:33 crc kubenswrapper[4783]: E0930 13:37:33.395511 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.89549578 +0000 UTC m=+153.826962087 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.395870 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed506947-4aea-481f-92e6-be13c8bb206b-catalog-content\") pod \"certified-operators-dskrd\" (UID: \"ed506947-4aea-481f-92e6-be13c8bb206b\") " pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.395911 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed506947-4aea-481f-92e6-be13c8bb206b-utilities\") pod \"certified-operators-dskrd\" (UID: \"ed506947-4aea-481f-92e6-be13c8bb206b\") " pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.434334 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.448816 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd6r5\" (UniqueName: \"kubernetes.io/projected/ed506947-4aea-481f-92e6-be13c8bb206b-kube-api-access-sd6r5\") pod \"certified-operators-dskrd\" (UID: \"ed506947-4aea-481f-92e6-be13c8bb206b\") " pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.474023 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kqm5j"] Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.498760 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.499053 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94e799b5-88f4-4957-99b6-112c0dc06105-catalog-content\") pod \"community-operators-kqm5j\" (UID: \"94e799b5-88f4-4957-99b6-112c0dc06105\") " pod="openshift-marketplace/community-operators-kqm5j" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.499117 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgb9n\" (UniqueName: \"kubernetes.io/projected/94e799b5-88f4-4957-99b6-112c0dc06105-kube-api-access-sgb9n\") pod \"community-operators-kqm5j\" (UID: \"94e799b5-88f4-4957-99b6-112c0dc06105\") " pod="openshift-marketplace/community-operators-kqm5j" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.499139 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc1ba935-2f99-4188-b2df-d657dc7c28bc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: 
\"bc1ba935-2f99-4188-b2df-d657dc7c28bc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.499182 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc1ba935-2f99-4188-b2df-d657dc7c28bc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bc1ba935-2f99-4188-b2df-d657dc7c28bc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.499234 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94e799b5-88f4-4957-99b6-112c0dc06105-utilities\") pod \"community-operators-kqm5j\" (UID: \"94e799b5-88f4-4957-99b6-112c0dc06105\") " pod="openshift-marketplace/community-operators-kqm5j" Sep 30 13:37:33 crc kubenswrapper[4783]: E0930 13:37:33.499333 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:33.999316345 +0000 UTC m=+153.930782652 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.531416 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-84r4j"] Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.532473 4783 util.go:30] "No sandbox for pod can be found. 
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.532473 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-84r4j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.556320 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-84r4j"]
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.568266 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-6shjd"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.571455 4783 patch_prober.go:28] interesting pod/router-default-5444994796-6shjd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 13:37:33 crc kubenswrapper[4783]: [-]has-synced failed: reason withheld
Sep 30 13:37:33 crc kubenswrapper[4783]: [+]process-running ok
Sep 30 13:37:33 crc kubenswrapper[4783]: healthz check failed
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.571533 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6shjd" podUID="d5b11b65-a14d-4f79-9c43-fbb5e93882aa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.574106 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.588003 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.601989 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/594ba4d4-8754-4ed5-8c23-0ce494df36ff-catalog-content\") pod \"certified-operators-84r4j\" (UID: \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\") " pod="openshift-marketplace/certified-operators-84r4j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.602049 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94e799b5-88f4-4957-99b6-112c0dc06105-utilities\") pod \"community-operators-kqm5j\" (UID: \"94e799b5-88f4-4957-99b6-112c0dc06105\") " pod="openshift-marketplace/community-operators-kqm5j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.602090 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94e799b5-88f4-4957-99b6-112c0dc06105-catalog-content\") pod \"community-operators-kqm5j\" (UID: \"94e799b5-88f4-4957-99b6-112c0dc06105\") " pod="openshift-marketplace/community-operators-kqm5j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.602143 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgb9n\" (UniqueName: \"kubernetes.io/projected/94e799b5-88f4-4957-99b6-112c0dc06105-kube-api-access-sgb9n\") pod \"community-operators-kqm5j\" (UID: \"94e799b5-88f4-4957-99b6-112c0dc06105\") " pod="openshift-marketplace/community-operators-kqm5j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.602171 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc1ba935-2f99-4188-b2df-d657dc7c28bc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"bc1ba935-2f99-4188-b2df-d657dc7c28bc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.602192 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8b8v\" (UniqueName: \"kubernetes.io/projected/594ba4d4-8754-4ed5-8c23-0ce494df36ff-kube-api-access-v8b8v\") pod \"certified-operators-84r4j\" (UID: \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\") " pod="openshift-marketplace/certified-operators-84r4j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.602239 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/594ba4d4-8754-4ed5-8c23-0ce494df36ff-utilities\") pod \"certified-operators-84r4j\" (UID: \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\") " pod="openshift-marketplace/certified-operators-84r4j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.602271 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc1ba935-2f99-4188-b2df-d657dc7c28bc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bc1ba935-2f99-4188-b2df-d657dc7c28bc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.602297 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:33 crc kubenswrapper[4783]: E0930 13:37:33.602646 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:34.102630093 +0000 UTC m=+154.034096400 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.603111 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94e799b5-88f4-4957-99b6-112c0dc06105-utilities\") pod \"community-operators-kqm5j\" (UID: \"94e799b5-88f4-4957-99b6-112c0dc06105\") " pod="openshift-marketplace/community-operators-kqm5j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.603391 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94e799b5-88f4-4957-99b6-112c0dc06105-catalog-content\") pod \"community-operators-kqm5j\" (UID: \"94e799b5-88f4-4957-99b6-112c0dc06105\") " pod="openshift-marketplace/community-operators-kqm5j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.603675 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc1ba935-2f99-4188-b2df-d657dc7c28bc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"bc1ba935-2f99-4188-b2df-d657dc7c28bc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.651053 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc1ba935-2f99-4188-b2df-d657dc7c28bc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bc1ba935-2f99-4188-b2df-d657dc7c28bc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.664490 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgb9n\" (UniqueName: \"kubernetes.io/projected/94e799b5-88f4-4957-99b6-112c0dc06105-kube-api-access-sgb9n\") pod \"community-operators-kqm5j\" (UID: \"94e799b5-88f4-4957-99b6-112c0dc06105\") " pod="openshift-marketplace/community-operators-kqm5j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.681623 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.703801 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:33 crc kubenswrapper[4783]: E0930 13:37:33.703987 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:34.203959138 +0000 UTC m=+154.135425445 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.704127 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8b8v\" (UniqueName: \"kubernetes.io/projected/594ba4d4-8754-4ed5-8c23-0ce494df36ff-kube-api-access-v8b8v\") pod \"certified-operators-84r4j\" (UID: \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\") " pod="openshift-marketplace/certified-operators-84r4j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.704169 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/594ba4d4-8754-4ed5-8c23-0ce494df36ff-utilities\") pod \"certified-operators-84r4j\" (UID: \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\") " pod="openshift-marketplace/certified-operators-84r4j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.704300 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.704330 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/594ba4d4-8754-4ed5-8c23-0ce494df36ff-catalog-content\") pod \"certified-operators-84r4j\" (UID: \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\") " pod="openshift-marketplace/certified-operators-84r4j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.705931 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/594ba4d4-8754-4ed5-8c23-0ce494df36ff-utilities\") pod \"certified-operators-84r4j\" (UID: \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\") " pod="openshift-marketplace/certified-operators-84r4j"
Sep 30 13:37:33 crc kubenswrapper[4783]: E0930 13:37:33.706704 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:34.206665814 +0000 UTC m=+154.138132121 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.706741 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/594ba4d4-8754-4ed5-8c23-0ce494df36ff-catalog-content\") pod \"certified-operators-84r4j\" (UID: \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\") " pod="openshift-marketplace/certified-operators-84r4j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.721459 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-l6679"]
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.722407 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.730053 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8b8v\" (UniqueName: \"kubernetes.io/projected/594ba4d4-8754-4ed5-8c23-0ce494df36ff-kube-api-access-v8b8v\") pod \"certified-operators-84r4j\" (UID: \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\") " pod="openshift-marketplace/certified-operators-84r4j"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.748111 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l6679"]
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.805942 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.806199 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzcl8\" (UniqueName: \"kubernetes.io/projected/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-kube-api-access-gzcl8\") pod \"community-operators-l6679\" (UID: \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\") " pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.806317 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-catalog-content\") pod \"community-operators-l6679\" (UID: \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\") " pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.806344 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-utilities\") pod \"community-operators-l6679\" (UID: \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\") " pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:37:33 crc kubenswrapper[4783]: E0930 13:37:33.806474 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:34.306440449 +0000 UTC m=+154.237906756 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.841674 4783 patch_prober.go:28] interesting pod/apiserver-76f77b778f-mt5f7 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Sep 30 13:37:33 crc kubenswrapper[4783]: [+]log ok
Sep 30 13:37:33 crc kubenswrapper[4783]: [+]etcd ok
Sep 30 13:37:33 crc kubenswrapper[4783]: [+]poststarthook/start-apiserver-admission-initializer ok
Sep 30 13:37:33 crc kubenswrapper[4783]: [+]poststarthook/generic-apiserver-start-informers ok
Sep 30 13:37:33 crc kubenswrapper[4783]: [+]poststarthook/max-in-flight-filter ok
Sep 30 13:37:33 crc kubenswrapper[4783]: [+]poststarthook/storage-object-count-tracker-hook ok
Sep 30 13:37:33 crc kubenswrapper[4783]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Sep 30 13:37:33 crc kubenswrapper[4783]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Sep 30 13:37:33 crc kubenswrapper[4783]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Sep 30 13:37:33 crc kubenswrapper[4783]: [+]poststarthook/project.openshift.io-projectcache ok
Sep 30 13:37:33 crc kubenswrapper[4783]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Sep 30 13:37:33 crc kubenswrapper[4783]: [-]poststarthook/openshift.io-startinformers failed: reason withheld
Sep 30 13:37:33 crc kubenswrapper[4783]: [+]poststarthook/openshift.io-restmapperupdater ok
Sep 30 13:37:33 crc kubenswrapper[4783]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Sep 30 13:37:33 crc kubenswrapper[4783]: livez check failed
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.841742 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-mt5f7" podUID="a2ac21c3-4001-4c91-851f-bcde41192c27" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.911468 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.911518 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-catalog-content\") pod \"community-operators-l6679\" (UID: \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\") " pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.911555 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-utilities\") pod \"community-operators-l6679\" (UID: \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\") " pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.911597 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzcl8\" (UniqueName: \"kubernetes.io/projected/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-kube-api-access-gzcl8\") pod \"community-operators-l6679\" (UID: \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\") " pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:37:33 crc kubenswrapper[4783]: E0930 13:37:33.911909 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:34.411888136 +0000 UTC m=+154.343354443 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.912038 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-catalog-content\") pod \"community-operators-l6679\" (UID: \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\") " pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.912081 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-utilities\") pod \"community-operators-l6679\" (UID: \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\") " pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:37:33 crc kubenswrapper[4783]: I0930 13:37:33.938364 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzcl8\" (UniqueName: \"kubernetes.io/projected/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-kube-api-access-gzcl8\") pod \"community-operators-l6679\" (UID: \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\") " pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.013020 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:34 crc kubenswrapper[4783]: E0930 13:37:34.013236 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:34.51318079 +0000 UTC m=+154.444647087 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.013819 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:34 crc kubenswrapper[4783]: E0930 13:37:34.014115 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:34.514100548 +0000 UTC m=+154.445566845 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.115124 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
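Editor's note: the "No retries permitted until ... (durationBeforeRetry 500ms)" entries come from the kubelet's pending-operations bookkeeping: a failed volume operation arms a delay, and the reconciler skips that operation until the deadline passes, which is why the same pair of mount and unmount errors repeats roughly twice per second here. A sketch of that gating pattern using the apimachinery wait helpers, illustrative only and not the kubelet's actual code; the stand-in mountDevice and the backoff parameters are assumptions for the example:

// backoff_sketch.go: retry a failing operation with a delay between attempts,
// the general shape behind the durationBeforeRetry lines above.
package main

import (
	"errors"
	"fmt"
	"time"

	"k8s.io/apimachinery/pkg/util/wait"
)

// mountDevice stands in for attacher.MountDevice; it keeps failing until
// the CSI driver registers, just as in the log above.
func mountDevice() error {
	return errors.New("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers")
}

func main() {
	backoff := wait.Backoff{
		Duration: 500 * time.Millisecond, // matches durationBeforeRetry in the log
		Factor:   2.0,                    // assumed growth factor for this sketch
		Steps:    5,
	}
	err := wait.ExponentialBackoff(backoff, func() (bool, error) {
		if merrr := mountDevice(); merrr != nil {
			fmt.Println("operation failed, will retry:", merrr)
			return false, nil // not done; wait out the backoff and try again
		}
		return true, nil // done
	})
	if err != nil {
		fmt.Println("gave up:", err)
	}
}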
Sep 30 13:37:34 crc kubenswrapper[4783]: E0930 13:37:34.115599 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:34.615582418 +0000 UTC m=+154.547048725 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.151388 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.200294 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.207646 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-84r4j"
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.209373 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dskrd"
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.214175 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-8p9wh"
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.216795 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:34 crc kubenswrapper[4783]: E0930 13:37:34.217131 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:34.71711439 +0000 UTC m=+154.648580697 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.305666 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-h7ww6"
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.310883 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fglf6"
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.319007 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:34 crc kubenswrapper[4783]: E0930 13:37:34.319149 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:34.819124676 +0000 UTC m=+154.750590983 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.319493 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:34 crc kubenswrapper[4783]: E0930 13:37:34.320730 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:34.820714027 +0000 UTC m=+154.752180334 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.327463 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.337389 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kqm5j"
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.344564 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.420754 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:34 crc kubenswrapper[4783]: E0930 13:37:34.421448 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:34.921431883 +0000 UTC m=+154.852898190 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.522145 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:34 crc kubenswrapper[4783]: E0930 13:37:34.522892 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:35.02284833 +0000 UTC m=+154.954314637 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.574037 4783 patch_prober.go:28] interesting pod/router-default-5444994796-6shjd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 13:37:34 crc kubenswrapper[4783]: [-]has-synced failed: reason withheld
Sep 30 13:37:34 crc kubenswrapper[4783]: [+]process-running ok
Sep 30 13:37:34 crc kubenswrapper[4783]: healthz check failed
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.574091 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6shjd" podUID="d5b11b65-a14d-4f79-9c43-fbb5e93882aa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.623963 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:34 crc kubenswrapper[4783]: E0930 13:37:34.626736 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:35.126692075 +0000 UTC m=+155.058158382 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.683836 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-84r4j"]
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.726929 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:34 crc kubenswrapper[4783]: E0930 13:37:34.727270 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:35.227257696 +0000 UTC m=+155.158724003 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.828158 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:34 crc kubenswrapper[4783]: E0930 13:37:34.828337 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:35.328309002 +0000 UTC m=+155.259775309 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.828781 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:34 crc kubenswrapper[4783]: E0930 13:37:34.829181 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:35.32916664 +0000 UTC m=+155.260632947 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.835904 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dskrd"]
Sep 30 13:37:34 crc kubenswrapper[4783]: W0930 13:37:34.845590 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poded506947_4aea_481f_92e6_be13c8bb206b.slice/crio-b1806b1f3ace7df780f7372267f4146e1a00f737a20fe215ed6c38a990bc399e WatchSource:0}: Error finding container b1806b1f3ace7df780f7372267f4146e1a00f737a20fe215ed6c38a990bc399e: Status 404 returned error can't find the container with id b1806b1f3ace7df780f7372267f4146e1a00f737a20fe215ed6c38a990bc399e
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.899509 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84r4j" event={"ID":"594ba4d4-8754-4ed5-8c23-0ce494df36ff","Type":"ContainerStarted","Data":"f54da8ea804e24f6108f9cc44bb51d3b57362a04254a432c0f8d8b0930ce4d33"}
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.917352 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" event={"ID":"1f2a2e69-6d09-4819-ac74-66a5806697e0","Type":"ContainerStarted","Data":"4c4c95781989adb1d6cc9f6a1b0afbf1297dc16740b9c82e13209d980906b43e"}
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.917432 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l6679"]
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.929203 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"bc1ba935-2f99-4188-b2df-d657dc7c28bc","Type":"ContainerStarted","Data":"7f6356bcf63754095d21f5230ff18a99c5f484015eee612e1fc2a4a76f06c120"}
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.929294 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"bc1ba935-2f99-4188-b2df-d657dc7c28bc","Type":"ContainerStarted","Data":"4130597099acc25f1237f13918e098b5d6c47f3ceacf5ede39432acc93f596bf"}
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.930191 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:34 crc kubenswrapper[4783]: E0930 13:37:34.930724 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:35.430695682 +0000 UTC m=+155.362161989 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.937613 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dskrd" event={"ID":"ed506947-4aea-481f-92e6-be13c8bb206b","Type":"ContainerStarted","Data":"b1806b1f3ace7df780f7372267f4146e1a00f737a20fe215ed6c38a990bc399e"}
Sep 30 13:37:34 crc kubenswrapper[4783]: W0930 13:37:34.937610 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8918f0d_0293_43a1_923a_6e3c4a9ceb81.slice/crio-a9322cc966f7193efd1be99863150ed0511a600e5ceb5e91001fef31582ff6c2 WatchSource:0}: Error finding container a9322cc966f7193efd1be99863150ed0511a600e5ceb5e91001fef31582ff6c2: Status 404 returned error can't find the container with id a9322cc966f7193efd1be99863150ed0511a600e5ceb5e91001fef31582ff6c2
Sep 30 13:37:34 crc kubenswrapper[4783]: I0930 13:37:34.950412 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.9503867160000001 podStartE2EDuration="1.950386716s" podCreationTimestamp="2025-09-30 13:37:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:34.949029593 +0000 UTC m=+154.880495900" watchObservedRunningTime="2025-09-30 13:37:34.950386716 +0000 UTC m=+154.881853033"
Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.030099 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kqm5j"]
Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.034575 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:35 crc kubenswrapper[4783]: E0930 13:37:35.035031 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:35.535013472 +0000 UTC m=+155.466479779 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.135988 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:35 crc kubenswrapper[4783]: E0930 13:37:35.136169 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:35.63613886 +0000 UTC m=+155.567605167 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.136241 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:35 crc kubenswrapper[4783]: E0930 13:37:35.136571 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:35.636559123 +0000 UTC m=+155.568025430 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.143738 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dnx6l"
Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.200135 4783 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.238128 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 13:37:35 crc kubenswrapper[4783]: E0930 13:37:35.239154 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:35.739135698 +0000 UTC m=+155.670602005 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.304359 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g7rj5"]
Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.305592 4783 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.306911 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.317056 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g7rj5"] Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.340409 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:35 crc kubenswrapper[4783]: E0930 13:37:35.340841 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:35.840816724 +0000 UTC m=+155.772283031 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.441978 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.442347 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66113e2e-c750-47b9-be53-81e4eddd9202-catalog-content\") pod \"redhat-marketplace-g7rj5\" (UID: \"66113e2e-c750-47b9-be53-81e4eddd9202\") " pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.442408 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8l9x8\" (UniqueName: \"kubernetes.io/projected/66113e2e-c750-47b9-be53-81e4eddd9202-kube-api-access-8l9x8\") pod \"redhat-marketplace-g7rj5\" (UID: \"66113e2e-c750-47b9-be53-81e4eddd9202\") " pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.442459 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66113e2e-c750-47b9-be53-81e4eddd9202-utilities\") pod \"redhat-marketplace-g7rj5\" (UID: \"66113e2e-c750-47b9-be53-81e4eddd9202\") " pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:37:35 crc kubenswrapper[4783]: E0930 13:37:35.442605 4783 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:35.942572783 +0000 UTC m=+155.874039100 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.543352 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8l9x8\" (UniqueName: \"kubernetes.io/projected/66113e2e-c750-47b9-be53-81e4eddd9202-kube-api-access-8l9x8\") pod \"redhat-marketplace-g7rj5\" (UID: \"66113e2e-c750-47b9-be53-81e4eddd9202\") " pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.543640 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66113e2e-c750-47b9-be53-81e4eddd9202-utilities\") pod \"redhat-marketplace-g7rj5\" (UID: \"66113e2e-c750-47b9-be53-81e4eddd9202\") " pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.543822 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.543953 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66113e2e-c750-47b9-be53-81e4eddd9202-catalog-content\") pod \"redhat-marketplace-g7rj5\" (UID: \"66113e2e-c750-47b9-be53-81e4eddd9202\") " pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.544097 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66113e2e-c750-47b9-be53-81e4eddd9202-utilities\") pod \"redhat-marketplace-g7rj5\" (UID: \"66113e2e-c750-47b9-be53-81e4eddd9202\") " pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:37:35 crc kubenswrapper[4783]: E0930 13:37:35.544121 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:36.044106005 +0000 UTC m=+155.975572312 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.544537 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66113e2e-c750-47b9-be53-81e4eddd9202-catalog-content\") pod \"redhat-marketplace-g7rj5\" (UID: \"66113e2e-c750-47b9-be53-81e4eddd9202\") " pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.564704 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8l9x8\" (UniqueName: \"kubernetes.io/projected/66113e2e-c750-47b9-be53-81e4eddd9202-kube-api-access-8l9x8\") pod \"redhat-marketplace-g7rj5\" (UID: \"66113e2e-c750-47b9-be53-81e4eddd9202\") " pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.570639 4783 patch_prober.go:28] interesting pod/router-default-5444994796-6shjd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 13:37:35 crc kubenswrapper[4783]: [-]has-synced failed: reason withheld Sep 30 13:37:35 crc kubenswrapper[4783]: [+]process-running ok Sep 30 13:37:35 crc kubenswrapper[4783]: healthz check failed Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.570680 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6shjd" podUID="d5b11b65-a14d-4f79-9c43-fbb5e93882aa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.645660 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:35 crc kubenswrapper[4783]: E0930 13:37:35.646334 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:36.146317187 +0000 UTC m=+156.077783484 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.702808 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-flrp7"] Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.703988 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.725939 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-flrp7"] Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.748316 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:35 crc kubenswrapper[4783]: E0930 13:37:35.748682 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:36.248670636 +0000 UTC m=+156.180136943 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.819397 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.849513 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:35 crc kubenswrapper[4783]: E0930 13:37:35.849659 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:36.349634229 +0000 UTC m=+156.281100536 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.850020 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.850062 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01cead24-2173-4d6e-90b3-5a22bec86918-catalog-content\") pod \"redhat-marketplace-flrp7\" (UID: \"01cead24-2173-4d6e-90b3-5a22bec86918\") " pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.850121 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzd5b\" (UniqueName: \"kubernetes.io/projected/01cead24-2173-4d6e-90b3-5a22bec86918-kube-api-access-bzd5b\") pod \"redhat-marketplace-flrp7\" (UID: \"01cead24-2173-4d6e-90b3-5a22bec86918\") " pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.850145 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01cead24-2173-4d6e-90b3-5a22bec86918-utilities\") pod \"redhat-marketplace-flrp7\" (UID: \"01cead24-2173-4d6e-90b3-5a22bec86918\") " pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:37:35 crc kubenswrapper[4783]: E0930 13:37:35.850760 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:36.350751774 +0000 UTC m=+156.282218081 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.946049 4783 generic.go:334] "Generic (PLEG): container finished" podID="a8918f0d-0293-43a1-923a-6e3c4a9ceb81" containerID="a9011d8d34fbf587c113fc9ace3c0ba69082364defcf5b7529f755ee17905f26" exitCode=0 Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.946215 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6679" event={"ID":"a8918f0d-0293-43a1-923a-6e3c4a9ceb81","Type":"ContainerDied","Data":"a9011d8d34fbf587c113fc9ace3c0ba69082364defcf5b7529f755ee17905f26"} Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.946401 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6679" event={"ID":"a8918f0d-0293-43a1-923a-6e3c4a9ceb81","Type":"ContainerStarted","Data":"a9322cc966f7193efd1be99863150ed0511a600e5ceb5e91001fef31582ff6c2"} Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.948540 4783 generic.go:334] "Generic (PLEG): container finished" podID="594ba4d4-8754-4ed5-8c23-0ce494df36ff" containerID="614ec72bebd2ce49c964886fb36513087d70ac1268c09567b361f195a0ff9d09" exitCode=0 Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.948600 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84r4j" event={"ID":"594ba4d4-8754-4ed5-8c23-0ce494df36ff","Type":"ContainerDied","Data":"614ec72bebd2ce49c964886fb36513087d70ac1268c09567b361f195a0ff9d09"} Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.951088 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.951117 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:35 crc kubenswrapper[4783]: E0930 13:37:35.951254 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:36.451210651 +0000 UTC m=+156.382676968 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.951358 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.951424 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01cead24-2173-4d6e-90b3-5a22bec86918-catalog-content\") pod \"redhat-marketplace-flrp7\" (UID: \"01cead24-2173-4d6e-90b3-5a22bec86918\") " pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.951494 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzd5b\" (UniqueName: \"kubernetes.io/projected/01cead24-2173-4d6e-90b3-5a22bec86918-kube-api-access-bzd5b\") pod \"redhat-marketplace-flrp7\" (UID: \"01cead24-2173-4d6e-90b3-5a22bec86918\") " pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.951524 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01cead24-2173-4d6e-90b3-5a22bec86918-utilities\") pod \"redhat-marketplace-flrp7\" (UID: \"01cead24-2173-4d6e-90b3-5a22bec86918\") " pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:37:35 crc kubenswrapper[4783]: E0930 13:37:35.951738 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 13:37:36.451724188 +0000 UTC m=+156.383190495 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwvfx" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.952371 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01cead24-2173-4d6e-90b3-5a22bec86918-catalog-content\") pod \"redhat-marketplace-flrp7\" (UID: \"01cead24-2173-4d6e-90b3-5a22bec86918\") " pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.953104 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" event={"ID":"1f2a2e69-6d09-4819-ac74-66a5806697e0","Type":"ContainerStarted","Data":"cd8e83b2e72688b65c0fb00c336f7b9d8cef37cd1cef811d10450f94f6662921"} Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.954269 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01cead24-2173-4d6e-90b3-5a22bec86918-utilities\") pod \"redhat-marketplace-flrp7\" (UID: \"01cead24-2173-4d6e-90b3-5a22bec86918\") " pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.956166 4783 generic.go:334] "Generic (PLEG): container finished" podID="bc1ba935-2f99-4188-b2df-d657dc7c28bc" containerID="7f6356bcf63754095d21f5230ff18a99c5f484015eee612e1fc2a4a76f06c120" exitCode=0 Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.956246 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"bc1ba935-2f99-4188-b2df-d657dc7c28bc","Type":"ContainerDied","Data":"7f6356bcf63754095d21f5230ff18a99c5f484015eee612e1fc2a4a76f06c120"} Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.957758 4783 generic.go:334] "Generic (PLEG): container finished" podID="6af2276a-3ae6-4c19-b75c-935d765d3890" containerID="af7a544648adb8b681260694207537bd4b9123dbb3afae22df9becc0d3095939" exitCode=0 Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.957800 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" event={"ID":"6af2276a-3ae6-4c19-b75c-935d765d3890","Type":"ContainerDied","Data":"af7a544648adb8b681260694207537bd4b9123dbb3afae22df9becc0d3095939"} Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.962592 4783 generic.go:334] "Generic (PLEG): container finished" podID="94e799b5-88f4-4957-99b6-112c0dc06105" containerID="3ef573fbb4062d556850213abf4c2b58f694c6832e4bf7491885898839d17bad" exitCode=0 Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.962650 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kqm5j" event={"ID":"94e799b5-88f4-4957-99b6-112c0dc06105","Type":"ContainerDied","Data":"3ef573fbb4062d556850213abf4c2b58f694c6832e4bf7491885898839d17bad"} Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.962677 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kqm5j" 
event={"ID":"94e799b5-88f4-4957-99b6-112c0dc06105","Type":"ContainerStarted","Data":"e82673f8b0a8eedb32eb081dc4b38854af051feb5ef448c662d1517cfe38ae9e"} Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.972151 4783 generic.go:334] "Generic (PLEG): container finished" podID="ed506947-4aea-481f-92e6-be13c8bb206b" containerID="481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576" exitCode=0 Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.972196 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dskrd" event={"ID":"ed506947-4aea-481f-92e6-be13c8bb206b","Type":"ContainerDied","Data":"481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576"} Sep 30 13:37:35 crc kubenswrapper[4783]: I0930 13:37:35.986258 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzd5b\" (UniqueName: \"kubernetes.io/projected/01cead24-2173-4d6e-90b3-5a22bec86918-kube-api-access-bzd5b\") pod \"redhat-marketplace-flrp7\" (UID: \"01cead24-2173-4d6e-90b3-5a22bec86918\") " pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.016144 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.013555 4783 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-30T13:37:35.200158512Z","Handler":null,"Name":""} Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.020680 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.026043 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.026344 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.030583 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.042534 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.052579 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:36 crc kubenswrapper[4783]: E0930 13:37:36.054388 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 13:37:36.554372735 +0000 UTC m=+156.485839042 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.055300 4783 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.055359 4783 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.071167 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g7rj5"] Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.154375 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.154431 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.154518 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.166774 4783 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.166815 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.191453 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwvfx\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.255468 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.255626 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.255696 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.255812 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.268046 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.274736 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-flrp7"] Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.275945 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 13:37:36 crc kubenswrapper[4783]: W0930 13:37:36.281115 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod01cead24_2173_4d6e_90b3_5a22bec86918.slice/crio-f51ed38968b9c93ea91681572260ae58f8aba7167c775d08b299e71bc3de097c WatchSource:0}: Error finding container f51ed38968b9c93ea91681572260ae58f8aba7167c775d08b299e71bc3de097c: Status 404 returned error can't find the container with id f51ed38968b9c93ea91681572260ae58f8aba7167c775d08b299e71bc3de097c Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.300762 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vz86z"] Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.302093 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.304552 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.309039 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vz86z"] Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.344131 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.399622 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.460941 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-catalog-content\") pod \"redhat-operators-vz86z\" (UID: \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\") " pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.461146 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-utilities\") pod \"redhat-operators-vz86z\" (UID: \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\") " pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.461251 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94gfz\" (UniqueName: \"kubernetes.io/projected/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-kube-api-access-94gfz\") pod \"redhat-operators-vz86z\" (UID: \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\") " pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.561931 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94gfz\" (UniqueName: \"kubernetes.io/projected/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-kube-api-access-94gfz\") pod \"redhat-operators-vz86z\" (UID: \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\") " pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.562245 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-catalog-content\") pod \"redhat-operators-vz86z\" (UID: \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\") " pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.562309 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-utilities\") pod \"redhat-operators-vz86z\" (UID: \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\") " pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.562797 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-utilities\") pod \"redhat-operators-vz86z\" (UID: \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\") " pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.562889 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-catalog-content\") pod \"redhat-operators-vz86z\" (UID: \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\") " pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.573661 4783 patch_prober.go:28] interesting pod/router-default-5444994796-6shjd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason 
withheld Sep 30 13:37:36 crc kubenswrapper[4783]: [-]has-synced failed: reason withheld Sep 30 13:37:36 crc kubenswrapper[4783]: [+]process-running ok Sep 30 13:37:36 crc kubenswrapper[4783]: healthz check failed Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.573744 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6shjd" podUID="d5b11b65-a14d-4f79-9c43-fbb5e93882aa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.589924 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94gfz\" (UniqueName: \"kubernetes.io/projected/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-kube-api-access-94gfz\") pod \"redhat-operators-vz86z\" (UID: \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\") " pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.619558 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.676986 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwvfx"] Sep 30 13:37:36 crc kubenswrapper[4783]: W0930 13:37:36.695052 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfcd81db_a627_438a_92b3_f5fcabeff1c4.slice/crio-cbd59e15182837ebbaf932c7071efdf6dfcee99d6a32f25e0e813f988022b9ba WatchSource:0}: Error finding container cbd59e15182837ebbaf932c7071efdf6dfcee99d6a32f25e0e813f988022b9ba: Status 404 returned error can't find the container with id cbd59e15182837ebbaf932c7071efdf6dfcee99d6a32f25e0e813f988022b9ba Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.707042 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-42xzs"] Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.708014 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.727785 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-42xzs"] Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.764057 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.818326 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vz86z"] Sep 30 13:37:36 crc kubenswrapper[4783]: W0930 13:37:36.830990 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d67aba9_ee2c_4608_98d5_f3e6f248ea66.slice/crio-eab7872d9e5171db9e5d9683e19495063f325e800ef8c1f5f28431066f4e6e11 WatchSource:0}: Error finding container eab7872d9e5171db9e5d9683e19495063f325e800ef8c1f5f28431066f4e6e11: Status 404 returned error can't find the container with id eab7872d9e5171db9e5d9683e19495063f325e800ef8c1f5f28431066f4e6e11 Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.850090 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.865544 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3e829d-2caa-4665-80ea-4aeab3a5f220-catalog-content\") pod \"redhat-operators-42xzs\" (UID: \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\") " pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.865607 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78dth\" (UniqueName: \"kubernetes.io/projected/fa3e829d-2caa-4665-80ea-4aeab3a5f220-kube-api-access-78dth\") pod \"redhat-operators-42xzs\" (UID: \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\") " pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.865691 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3e829d-2caa-4665-80ea-4aeab3a5f220-utilities\") pod \"redhat-operators-42xzs\" (UID: \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\") " pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.966747 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3e829d-2caa-4665-80ea-4aeab3a5f220-utilities\") pod \"redhat-operators-42xzs\" (UID: \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\") " pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.966823 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3e829d-2caa-4665-80ea-4aeab3a5f220-catalog-content\") pod \"redhat-operators-42xzs\" (UID: \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\") " pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.966894 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78dth\" 
(UniqueName: \"kubernetes.io/projected/fa3e829d-2caa-4665-80ea-4aeab3a5f220-kube-api-access-78dth\") pod \"redhat-operators-42xzs\" (UID: \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\") " pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.967242 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3e829d-2caa-4665-80ea-4aeab3a5f220-utilities\") pod \"redhat-operators-42xzs\" (UID: \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\") " pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.967297 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3e829d-2caa-4665-80ea-4aeab3a5f220-catalog-content\") pod \"redhat-operators-42xzs\" (UID: \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\") " pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:37:36 crc kubenswrapper[4783]: I0930 13:37:36.983959 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78dth\" (UniqueName: \"kubernetes.io/projected/fa3e829d-2caa-4665-80ea-4aeab3a5f220-kube-api-access-78dth\") pod \"redhat-operators-42xzs\" (UID: \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\") " pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.064678 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vz86z" event={"ID":"8d67aba9-ee2c-4608-98d5-f3e6f248ea66","Type":"ContainerStarted","Data":"eab7872d9e5171db9e5d9683e19495063f325e800ef8c1f5f28431066f4e6e11"} Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.065919 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-42xzs"
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.068341 4783 generic.go:334] "Generic (PLEG): container finished" podID="66113e2e-c750-47b9-be53-81e4eddd9202" containerID="07564d10aae1e592921c34276187e06732fec4525ef35652a6f0ebf651ae6c59" exitCode=0
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.068426 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g7rj5" event={"ID":"66113e2e-c750-47b9-be53-81e4eddd9202","Type":"ContainerDied","Data":"07564d10aae1e592921c34276187e06732fec4525ef35652a6f0ebf651ae6c59"}
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.068463 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g7rj5" event={"ID":"66113e2e-c750-47b9-be53-81e4eddd9202","Type":"ContainerStarted","Data":"a89d89c62948e3e1db1080e82c1ff44b9b5c8684cfd4a38c8b85a7941bff7286"}
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.074671 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" event={"ID":"1f2a2e69-6d09-4819-ac74-66a5806697e0","Type":"ContainerStarted","Data":"33a1d312f5da4a622998a5912cd693178699d4e6fb9e060460a97746bb34523d"}
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.079236 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" event={"ID":"dfcd81db-a627-438a-92b3-f5fcabeff1c4","Type":"ContainerStarted","Data":"cbd59e15182837ebbaf932c7071efdf6dfcee99d6a32f25e0e813f988022b9ba"}
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.081578 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48","Type":"ContainerStarted","Data":"3fe8616b1dafcd32882235738f58e7837f4539198c5ac2952df41311f8790bb6"}
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.083596 4783 generic.go:334] "Generic (PLEG): container finished" podID="01cead24-2173-4d6e-90b3-5a22bec86918" containerID="efa61047671090b006e0d3c4c21db72ee1be7781d87be7dcd53bf3bf84c56e00" exitCode=0
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.083666 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-flrp7" event={"ID":"01cead24-2173-4d6e-90b3-5a22bec86918","Type":"ContainerDied","Data":"efa61047671090b006e0d3c4c21db72ee1be7781d87be7dcd53bf3bf84c56e00"}
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.083689 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-flrp7" event={"ID":"01cead24-2173-4d6e-90b3-5a22bec86918","Type":"ContainerStarted","Data":"f51ed38968b9c93ea91681572260ae58f8aba7167c775d08b299e71bc3de097c"}
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.121075 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.127066 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-mt5f7"
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.140711 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-4kmnp" podStartSLOduration=17.140684933 podStartE2EDuration="17.140684933s" podCreationTimestamp="2025-09-30 13:37:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:37.135666814 +0000 UTC m=+157.067133161" watchObservedRunningTime="2025-09-30 13:37:37.140684933 +0000 UTC m=+157.072151250"
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.420369 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.477826 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc1ba935-2f99-4188-b2df-d657dc7c28bc-kube-api-access\") pod \"bc1ba935-2f99-4188-b2df-d657dc7c28bc\" (UID: \"bc1ba935-2f99-4188-b2df-d657dc7c28bc\") "
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.477922 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc1ba935-2f99-4188-b2df-d657dc7c28bc-kubelet-dir\") pod \"bc1ba935-2f99-4188-b2df-d657dc7c28bc\" (UID: \"bc1ba935-2f99-4188-b2df-d657dc7c28bc\") "
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.478260 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bc1ba935-2f99-4188-b2df-d657dc7c28bc-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "bc1ba935-2f99-4188-b2df-d657dc7c28bc" (UID: "bc1ba935-2f99-4188-b2df-d657dc7c28bc"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.484463 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc1ba935-2f99-4188-b2df-d657dc7c28bc-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "bc1ba935-2f99-4188-b2df-d657dc7c28bc" (UID: "bc1ba935-2f99-4188-b2df-d657dc7c28bc"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.493263 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-42xzs"]
Sep 30 13:37:37 crc kubenswrapper[4783]: W0930 13:37:37.508543 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa3e829d_2caa_4665_80ea_4aeab3a5f220.slice/crio-b8c75bda2f5f1ed3f88a50bafa2c7792e5bd65f2c5f04dd2668c7d6e1f272154 WatchSource:0}: Error finding container b8c75bda2f5f1ed3f88a50bafa2c7792e5bd65f2c5f04dd2668c7d6e1f272154: Status 404 returned error can't find the container with id b8c75bda2f5f1ed3f88a50bafa2c7792e5bd65f2c5f04dd2668c7d6e1f272154
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.551679 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42"
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.570513 4783 patch_prober.go:28] interesting pod/router-default-5444994796-6shjd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 13:37:37 crc kubenswrapper[4783]: [-]has-synced failed: reason withheld
Sep 30 13:37:37 crc kubenswrapper[4783]: [+]process-running ok
Sep 30 13:37:37 crc kubenswrapper[4783]: healthz check failed
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.570573 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6shjd" podUID="d5b11b65-a14d-4f79-9c43-fbb5e93882aa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.579841 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc1ba935-2f99-4188-b2df-d657dc7c28bc-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.579880 4783 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc1ba935-2f99-4188-b2df-d657dc7c28bc-kubelet-dir\") on node \"crc\" DevicePath \"\""
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.673729 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.673797 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.680248 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6af2276a-3ae6-4c19-b75c-935d765d3890-config-volume\") pod \"6af2276a-3ae6-4c19-b75c-935d765d3890\" (UID: \"6af2276a-3ae6-4c19-b75c-935d765d3890\") "
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.680307 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6af2276a-3ae6-4c19-b75c-935d765d3890-secret-volume\") pod \"6af2276a-3ae6-4c19-b75c-935d765d3890\" (UID: \"6af2276a-3ae6-4c19-b75c-935d765d3890\") "
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.680359 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rsmd\" (UniqueName: \"kubernetes.io/projected/6af2276a-3ae6-4c19-b75c-935d765d3890-kube-api-access-9rsmd\") pod \"6af2276a-3ae6-4c19-b75c-935d765d3890\" (UID: \"6af2276a-3ae6-4c19-b75c-935d765d3890\") "
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.681305 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6af2276a-3ae6-4c19-b75c-935d765d3890-config-volume" (OuterVolumeSpecName: "config-volume") pod "6af2276a-3ae6-4c19-b75c-935d765d3890" (UID: "6af2276a-3ae6-4c19-b75c-935d765d3890"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.698839 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6af2276a-3ae6-4c19-b75c-935d765d3890-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6af2276a-3ae6-4c19-b75c-935d765d3890" (UID: "6af2276a-3ae6-4c19-b75c-935d765d3890"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.700858 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6af2276a-3ae6-4c19-b75c-935d765d3890-kube-api-access-9rsmd" (OuterVolumeSpecName: "kube-api-access-9rsmd") pod "6af2276a-3ae6-4c19-b75c-935d765d3890" (UID: "6af2276a-3ae6-4c19-b75c-935d765d3890"). InnerVolumeSpecName "kube-api-access-9rsmd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.781439 4783 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6af2276a-3ae6-4c19-b75c-935d765d3890-config-volume\") on node \"crc\" DevicePath \"\""
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.781469 4783 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6af2276a-3ae6-4c19-b75c-935d765d3890-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 30 13:37:37 crc kubenswrapper[4783]: I0930 13:37:37.781479 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rsmd\" (UniqueName: \"kubernetes.io/projected/6af2276a-3ae6-4c19-b75c-935d765d3890-kube-api-access-9rsmd\") on node \"crc\" DevicePath \"\""
Sep 30 13:37:38 crc kubenswrapper[4783]: I0930 13:37:38.091044 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48","Type":"ContainerStarted","Data":"db74fac73f30080735c618c78d5dd766441f231659ec5042e596dac7044a2962"}
Sep 30 13:37:38 crc kubenswrapper[4783]: I0930 13:37:38.093111 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42"
Sep 30 13:37:38 crc kubenswrapper[4783]: I0930 13:37:38.093162 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42" event={"ID":"6af2276a-3ae6-4c19-b75c-935d765d3890","Type":"ContainerDied","Data":"1b91a7cef9f41b8f9af3b0260d5afd300a082e66b8ab49c1bf98f916395b8024"}
Sep 30 13:37:38 crc kubenswrapper[4783]: I0930 13:37:38.093243 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b91a7cef9f41b8f9af3b0260d5afd300a082e66b8ab49c1bf98f916395b8024"
Sep 30 13:37:38 crc kubenswrapper[4783]: I0930 13:37:38.094215 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-42xzs" event={"ID":"fa3e829d-2caa-4665-80ea-4aeab3a5f220","Type":"ContainerStarted","Data":"b8c75bda2f5f1ed3f88a50bafa2c7792e5bd65f2c5f04dd2668c7d6e1f272154"}
Sep 30 13:37:38 crc kubenswrapper[4783]: I0930 13:37:38.095902 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vz86z" event={"ID":"8d67aba9-ee2c-4608-98d5-f3e6f248ea66","Type":"ContainerStarted","Data":"69be560c81ec557f1e46a863aaa1ecbbe05895de03e2eca9e74c1e51a88e4f85"}
Sep 30 13:37:38 crc kubenswrapper[4783]: I0930 13:37:38.098467 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 13:37:38 crc kubenswrapper[4783]: I0930 13:37:38.098481 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"bc1ba935-2f99-4188-b2df-d657dc7c28bc","Type":"ContainerDied","Data":"4130597099acc25f1237f13918e098b5d6c47f3ceacf5ede39432acc93f596bf"}
Sep 30 13:37:38 crc kubenswrapper[4783]: I0930 13:37:38.098574 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4130597099acc25f1237f13918e098b5d6c47f3ceacf5ede39432acc93f596bf"
Sep 30 13:37:38 crc kubenswrapper[4783]: I0930 13:37:38.100326 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" event={"ID":"dfcd81db-a627-438a-92b3-f5fcabeff1c4","Type":"ContainerStarted","Data":"4b5343a560bc8591999f9a675f9b075eb0cb04f4009fcbaebe95e254cca9f1fa"}
Sep 30 13:37:38 crc kubenswrapper[4783]: I0930 13:37:38.570384 4783 patch_prober.go:28] interesting pod/router-default-5444994796-6shjd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 13:37:38 crc kubenswrapper[4783]: [-]has-synced failed: reason withheld
Sep 30 13:37:38 crc kubenswrapper[4783]: [+]process-running ok
Sep 30 13:37:38 crc kubenswrapper[4783]: healthz check failed
Sep 30 13:37:38 crc kubenswrapper[4783]: I0930 13:37:38.570457 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6shjd" podUID="d5b11b65-a14d-4f79-9c43-fbb5e93882aa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 13:37:38 crc kubenswrapper[4783]: I0930 13:37:38.943235 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-6j7xf"
Sep 30 13:37:39 crc kubenswrapper[4783]: I0930 13:37:39.107825 4783 generic.go:334] "Generic (PLEG): container finished" podID="4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48" containerID="db74fac73f30080735c618c78d5dd766441f231659ec5042e596dac7044a2962" exitCode=0
Sep 30 13:37:39 crc kubenswrapper[4783]: I0930 13:37:39.108247 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48","Type":"ContainerDied","Data":"db74fac73f30080735c618c78d5dd766441f231659ec5042e596dac7044a2962"}
Sep 30 13:37:39 crc kubenswrapper[4783]: I0930 13:37:39.111725 4783 generic.go:334] "Generic (PLEG): container finished" podID="fa3e829d-2caa-4665-80ea-4aeab3a5f220" containerID="72cbcdaf5021db163f7efd45aa415b4acea399534128210df13b5089f19be700" exitCode=0
Sep 30 13:37:39 crc kubenswrapper[4783]: I0930 13:37:39.113127 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-42xzs" event={"ID":"fa3e829d-2caa-4665-80ea-4aeab3a5f220","Type":"ContainerDied","Data":"72cbcdaf5021db163f7efd45aa415b4acea399534128210df13b5089f19be700"}
Sep 30 13:37:39 crc kubenswrapper[4783]: I0930 13:37:39.123159 4783 generic.go:334] "Generic (PLEG): container finished" podID="8d67aba9-ee2c-4608-98d5-f3e6f248ea66" containerID="69be560c81ec557f1e46a863aaa1ecbbe05895de03e2eca9e74c1e51a88e4f85" exitCode=0
Sep 30 13:37:39 crc kubenswrapper[4783]: I0930 13:37:39.123243 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vz86z" event={"ID":"8d67aba9-ee2c-4608-98d5-f3e6f248ea66","Type":"ContainerDied","Data":"69be560c81ec557f1e46a863aaa1ecbbe05895de03e2eca9e74c1e51a88e4f85"}
Sep 30 13:37:39 crc kubenswrapper[4783]: I0930 13:37:39.123364 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:37:39 crc kubenswrapper[4783]: I0930 13:37:39.149818 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" podStartSLOduration=134.149780841 podStartE2EDuration="2m14.149780841s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:37:39.148694546 +0000 UTC m=+159.080160853" watchObservedRunningTime="2025-09-30 13:37:39.149780841 +0000 UTC m=+159.081247148"
Sep 30 13:37:39 crc kubenswrapper[4783]: I0930 13:37:39.569183 4783 patch_prober.go:28] interesting pod/router-default-5444994796-6shjd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 13:37:39 crc kubenswrapper[4783]: [-]has-synced failed: reason withheld
Sep 30 13:37:39 crc kubenswrapper[4783]: [+]process-running ok
Sep 30 13:37:39 crc kubenswrapper[4783]: healthz check failed
Sep 30 13:37:39 crc kubenswrapper[4783]: I0930 13:37:39.569257 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6shjd" podUID="d5b11b65-a14d-4f79-9c43-fbb5e93882aa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 13:37:40 crc kubenswrapper[4783]: I0930 13:37:40.434481 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 30 13:37:40 crc kubenswrapper[4783]: I0930 13:37:40.519567 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48-kubelet-dir\") pod \"4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48\" (UID: \"4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48\") "
Sep 30 13:37:40 crc kubenswrapper[4783]: I0930 13:37:40.519617 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48-kube-api-access\") pod \"4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48\" (UID: \"4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48\") "
Sep 30 13:37:40 crc kubenswrapper[4783]: I0930 13:37:40.519714 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48" (UID: "4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 13:37:40 crc kubenswrapper[4783]: I0930 13:37:40.521544 4783 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48-kubelet-dir\") on node \"crc\" DevicePath \"\""
Sep 30 13:37:40 crc kubenswrapper[4783]: I0930 13:37:40.524759 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48" (UID: "4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:37:40 crc kubenswrapper[4783]: I0930 13:37:40.570541 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-6shjd"
Sep 30 13:37:40 crc kubenswrapper[4783]: I0930 13:37:40.574679 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-6shjd"
Sep 30 13:37:40 crc kubenswrapper[4783]: I0930 13:37:40.624945 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 30 13:37:41 crc kubenswrapper[4783]: I0930 13:37:41.135820 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48","Type":"ContainerDied","Data":"3fe8616b1dafcd32882235738f58e7837f4539198c5ac2952df41311f8790bb6"}
Sep 30 13:37:41 crc kubenswrapper[4783]: I0930 13:37:41.135866 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3fe8616b1dafcd32882235738f58e7837f4539198c5ac2952df41311f8790bb6"
Sep 30 13:37:41 crc kubenswrapper[4783]: I0930 13:37:41.136069 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 30 13:37:43 crc kubenswrapper[4783]: I0930 13:37:43.296541 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Sep 30 13:37:43 crc kubenswrapper[4783]: I0930 13:37:43.296786 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Sep 30 13:37:43 crc kubenswrapper[4783]: I0930 13:37:43.296594 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Sep 30 13:37:43 crc kubenswrapper[4783]: I0930 13:37:43.296869 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Sep 30 13:37:43 crc kubenswrapper[4783]: I0930 13:37:43.345850 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-x9v6j"
Sep 30 13:37:43 crc kubenswrapper[4783]: I0930 13:37:43.348981 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-x9v6j"
Sep 30 13:37:48 crc kubenswrapper[4783]: I0930 13:37:48.036673 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs\") pod \"network-metrics-daemon-k69sq\" (UID: \"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\") " pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:37:48 crc kubenswrapper[4783]: I0930 13:37:48.045691 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/700fd549-bf4a-4e30-9e2c-efdb039a7ac4-metrics-certs\") pod \"network-metrics-daemon-k69sq\" (UID: \"700fd549-bf4a-4e30-9e2c-efdb039a7ac4\") " pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:37:48 crc kubenswrapper[4783]: I0930 13:37:48.179327 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k69sq"
Sep 30 13:37:53 crc kubenswrapper[4783]: I0930 13:37:53.296437 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Sep 30 13:37:53 crc kubenswrapper[4783]: I0930 13:37:53.296516 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Sep 30 13:37:53 crc kubenswrapper[4783]: I0930 13:37:53.296502 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Sep 30 13:37:53 crc kubenswrapper[4783]: I0930 13:37:53.296631 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Sep 30 13:37:53 crc kubenswrapper[4783]: I0930 13:37:53.296688 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-z67sw"
Sep 30 13:37:53 crc kubenswrapper[4783]: I0930 13:37:53.297400 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Sep 30 13:37:53 crc kubenswrapper[4783]: I0930 13:37:53.297470 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Sep 30 13:37:53 crc kubenswrapper[4783]: I0930 13:37:53.297481 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"ddc12ace4d979e36f9b5e0d952819e943109c4eb1895002fc12bee3544c56791"} pod="openshift-console/downloads-7954f5f757-z67sw" containerMessage="Container download-server failed liveness probe, will be restarted"
Sep 30 13:37:53 crc kubenswrapper[4783]: I0930 13:37:53.297641 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" containerID="cri-o://ddc12ace4d979e36f9b5e0d952819e943109c4eb1895002fc12bee3544c56791" gracePeriod=2
Sep 30 13:37:55 crc kubenswrapper[4783]: I0930 13:37:55.240793 4783 generic.go:334] "Generic (PLEG): container finished" podID="5e34af40-7563-4772-bd48-cc31a0354c25" containerID="ddc12ace4d979e36f9b5e0d952819e943109c4eb1895002fc12bee3544c56791" exitCode=0
Sep 30 13:37:55 crc kubenswrapper[4783]: I0930 13:37:55.241115 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-z67sw" event={"ID":"5e34af40-7563-4772-bd48-cc31a0354c25","Type":"ContainerDied","Data":"ddc12ace4d979e36f9b5e0d952819e943109c4eb1895002fc12bee3544c56791"}
Sep 30 13:37:56 crc kubenswrapper[4783]: I0930 13:37:56.410319 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx"
Sep 30 13:38:03 crc kubenswrapper[4783]: I0930 13:38:03.296323 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Sep 30 13:38:03 crc kubenswrapper[4783]: I0930 13:38:03.296683 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Sep 30 13:38:04 crc kubenswrapper[4783]: I0930 13:38:04.175818 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vqmwl"
Sep 30 13:38:04 crc kubenswrapper[4783]: E0930 13:38:04.375010 4783 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:7fd0dc32cc360fd8eccbeef60647eb669da91b47f9b9e7a82238ffe30f860285: Get \"https://registry.redhat.io/v2/redhat/redhat-marketplace-index/blobs/sha256:7fd0dc32cc360fd8eccbeef60647eb669da91b47f9b9e7a82238ffe30f860285\": context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Sep 30 13:38:04 crc kubenswrapper[4783]: E0930 13:38:04.375188 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bzd5b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-flrp7_openshift-marketplace(01cead24-2173-4d6e-90b3-5a22bec86918): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:7fd0dc32cc360fd8eccbeef60647eb669da91b47f9b9e7a82238ffe30f860285: Get \"https://registry.redhat.io/v2/redhat/redhat-marketplace-index/blobs/sha256:7fd0dc32cc360fd8eccbeef60647eb669da91b47f9b9e7a82238ffe30f860285\": context canceled" logger="UnhandledError"
Sep 30 13:38:04 crc kubenswrapper[4783]: E0930 13:38:04.377330 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:7fd0dc32cc360fd8eccbeef60647eb669da91b47f9b9e7a82238ffe30f860285: Get \\\"https://registry.redhat.io/v2/redhat/redhat-marketplace-index/blobs/sha256:7fd0dc32cc360fd8eccbeef60647eb669da91b47f9b9e7a82238ffe30f860285\\\": context canceled\"" pod="openshift-marketplace/redhat-marketplace-flrp7" podUID="01cead24-2173-4d6e-90b3-5a22bec86918"
Sep 30 13:38:07 crc kubenswrapper[4783]: I0930 13:38:07.674299 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 13:38:07 crc kubenswrapper[4783]: I0930 13:38:07.674689 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 13:38:09 crc kubenswrapper[4783]: E0930 13:38:09.469759 4783 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Sep 30 13:38:09 crc kubenswrapper[4783]: E0930 13:38:09.469902 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v8b8v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-84r4j_openshift-marketplace(594ba4d4-8754-4ed5-8c23-0ce494df36ff): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 13:38:09 crc kubenswrapper[4783]: E0930 13:38:09.471128 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-84r4j" podUID="594ba4d4-8754-4ed5-8c23-0ce494df36ff"
Sep 30 13:38:09 crc kubenswrapper[4783]: E0930 13:38:09.491050 4783 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Sep 30 13:38:09 crc kubenswrapper[4783]: E0930 13:38:09.491179 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sd6r5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-dskrd_openshift-marketplace(ed506947-4aea-481f-92e6-be13c8bb206b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 13:38:09 crc kubenswrapper[4783]: E0930 13:38:09.492361 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-dskrd" podUID="ed506947-4aea-481f-92e6-be13c8bb206b"
Sep 30 13:38:09 crc kubenswrapper[4783]: I0930 13:38:09.919960 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 13:38:13 crc kubenswrapper[4783]: E0930 13:38:13.032735 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-84r4j" podUID="594ba4d4-8754-4ed5-8c23-0ce494df36ff"
Sep 30 13:38:13 crc kubenswrapper[4783]: E0930 13:38:13.032795 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-flrp7" podUID="01cead24-2173-4d6e-90b3-5a22bec86918"
Sep 30 13:38:13 crc kubenswrapper[4783]: E0930 13:38:13.033465 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-dskrd" podUID="ed506947-4aea-481f-92e6-be13c8bb206b"
Sep 30 13:38:13 crc kubenswrapper[4783]: E0930 13:38:13.070257 4783 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Sep 30 13:38:13 crc kubenswrapper[4783]: E0930 13:38:13.070430 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-78dth,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-42xzs_openshift-marketplace(fa3e829d-2caa-4665-80ea-4aeab3a5f220): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 13:38:13 crc kubenswrapper[4783]: E0930 13:38:13.071818 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-42xzs" podUID="fa3e829d-2caa-4665-80ea-4aeab3a5f220"
Sep 30 13:38:13 crc kubenswrapper[4783]: I0930 13:38:13.296667 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Sep 30 13:38:13 crc kubenswrapper[4783]: I0930 13:38:13.296732 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Sep 30 13:38:18 crc kubenswrapper[4783]: E0930 13:38:18.782402 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-42xzs" podUID="fa3e829d-2caa-4665-80ea-4aeab3a5f220"
Sep 30 13:38:18 crc kubenswrapper[4783]: E0930 13:38:18.846647 4783 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Sep 30 13:38:18 crc kubenswrapper[4783]: E0930 13:38:18.846793 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8l9x8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-g7rj5_openshift-marketplace(66113e2e-c750-47b9-be53-81e4eddd9202): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 13:38:18 crc kubenswrapper[4783]: E0930 13:38:18.848889 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-g7rj5" podUID="66113e2e-c750-47b9-be53-81e4eddd9202"
Sep 30 13:38:19 crc kubenswrapper[4783]: I0930 13:38:19.241732 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-k69sq"]
Sep 30 13:38:20 crc kubenswrapper[4783]: I0930 13:38:20.395215 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-z67sw" event={"ID":"5e34af40-7563-4772-bd48-cc31a0354c25","Type":"ContainerStarted","Data":"05a9a50e0b8d5dfa797a49872b083342e6d1657a49a301bae3f551f8fc286578"}
Sep 30 13:38:20 crc kubenswrapper[4783]: I0930 13:38:20.396141 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-z67sw"
Sep 30 13:38:20 crc kubenswrapper[4783]: I0930 13:38:20.402464 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Sep 30 13:38:20 crc kubenswrapper[4783]: I0930 13:38:20.402564 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Sep 30 13:38:20 crc kubenswrapper[4783]: E0930 13:38:20.536345 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-g7rj5" podUID="66113e2e-c750-47b9-be53-81e4eddd9202"
Sep 30 13:38:21 crc kubenswrapper[4783]: I0930 13:38:21.403824 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k69sq" event={"ID":"700fd549-bf4a-4e30-9e2c-efdb039a7ac4","Type":"ContainerStarted","Data":"ec012ba6777ccb3cb33d12a1c32e45cab3f7b78c578d97ed1391ee301cbb8d76"}
Sep 30 13:38:21 crc kubenswrapper[4783]: I0930 13:38:21.404613 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Sep 30 13:38:21 crc kubenswrapper[4783]: I0930 13:38:21.404699 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Sep 30 13:38:22 crc kubenswrapper[4783]: I0930 13:38:22.412427 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6679" event={"ID":"a8918f0d-0293-43a1-923a-6e3c4a9ceb81","Type":"ContainerStarted","Data":"5b6d13e7a687c87bb3c5093b21b6db6f675138d66a9c21f6c78bbdc74087d742"}
Sep 30 13:38:22 crc kubenswrapper[4783]: I0930 13:38:22.414999 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vz86z" event={"ID":"8d67aba9-ee2c-4608-98d5-f3e6f248ea66","Type":"ContainerStarted","Data":"e1b6a6d7645481ad871d4f52911e1d97c4b70c2bffb3329b7c52c38a408ddf81"}
Sep 30 13:38:22 crc kubenswrapper[4783]: I0930 13:38:22.417329 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k69sq" event={"ID":"700fd549-bf4a-4e30-9e2c-efdb039a7ac4","Type":"ContainerStarted","Data":"5574900875dd9cf25f74fac4a6a093bca9b2049098d6697f007991a89fd0711a"}
Sep 30 13:38:22 crc kubenswrapper[4783]: I0930 13:38:22.420740 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kqm5j" event={"ID":"94e799b5-88f4-4957-99b6-112c0dc06105","Type":"ContainerStarted","Data":"19c63833de5c7bc2f5ba652e18a8270afbf9be20c1de5c407a32c3e820dba256"}
Sep 30 13:38:22 crc kubenswrapper[4783]: I0930 13:38:22.420838 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Sep 30 13:38:22 crc kubenswrapper[4783]: I0930 13:38:22.420886 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Sep 30 13:38:23 crc kubenswrapper[4783]: I0930 13:38:23.295993 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Sep 30 13:38:23 crc kubenswrapper[4783]: I0930 13:38:23.295995 4783 patch_prober.go:28] interesting pod/downloads-7954f5f757-z67sw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Sep 30 13:38:23 crc kubenswrapper[4783]: I0930 13:38:23.296057 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Sep 30 13:38:23 crc kubenswrapper[4783]: I0930 13:38:23.296106 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z67sw" podUID="5e34af40-7563-4772-bd48-cc31a0354c25" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Sep 30 13:38:23 crc kubenswrapper[4783]: I0930 13:38:23.428358 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k69sq" event={"ID":"700fd549-bf4a-4e30-9e2c-efdb039a7ac4","Type":"ContainerStarted","Data":"923b45a2362d12cf6051fa50a25d5b1f8b02064c1f549465628cfc8daa5ecd8a"}
Sep 30 13:38:23 crc kubenswrapper[4783]: I0930 13:38:23.430864 4783 generic.go:334] "Generic (PLEG): container finished" podID="94e799b5-88f4-4957-99b6-112c0dc06105" containerID="19c63833de5c7bc2f5ba652e18a8270afbf9be20c1de5c407a32c3e820dba256" exitCode=0
Sep 30 13:38:23 crc kubenswrapper[4783]: I0930 13:38:23.430990 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kqm5j" event={"ID":"94e799b5-88f4-4957-99b6-112c0dc06105","Type":"ContainerDied","Data":"19c63833de5c7bc2f5ba652e18a8270afbf9be20c1de5c407a32c3e820dba256"}
Sep 30 13:38:23 crc kubenswrapper[4783]: I0930 13:38:23.435359 4783 generic.go:334] "Generic (PLEG): container finished" podID="a8918f0d-0293-43a1-923a-6e3c4a9ceb81" containerID="5b6d13e7a687c87bb3c5093b21b6db6f675138d66a9c21f6c78bbdc74087d742" exitCode=0
Sep 30 13:38:23 crc kubenswrapper[4783]: I0930 13:38:23.435406 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6679" event={"ID":"a8918f0d-0293-43a1-923a-6e3c4a9ceb81","Type":"ContainerDied","Data":"5b6d13e7a687c87bb3c5093b21b6db6f675138d66a9c21f6c78bbdc74087d742"}
Sep 30 13:38:23 crc kubenswrapper[4783]: I0930 13:38:23.446108 4783 generic.go:334] "Generic (PLEG): container finished" podID="8d67aba9-ee2c-4608-98d5-f3e6f248ea66" containerID="e1b6a6d7645481ad871d4f52911e1d97c4b70c2bffb3329b7c52c38a408ddf81" exitCode=0
Sep 30 13:38:23 crc kubenswrapper[4783]: I0930 13:38:23.446340 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vz86z" event={"ID":"8d67aba9-ee2c-4608-98d5-f3e6f248ea66","Type":"ContainerDied","Data":"e1b6a6d7645481ad871d4f52911e1d97c4b70c2bffb3329b7c52c38a408ddf81"}
Sep 30 13:38:23 crc kubenswrapper[4783]: I0930 13:38:23.464279 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-k69sq" podStartSLOduration=178.464258996 podStartE2EDuration="2m58.464258996s" podCreationTimestamp="2025-09-30 13:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:38:23.462955815 +0000 UTC m=+203.394422152" watchObservedRunningTime="2025-09-30 13:38:23.464258996 +0000 UTC m=+203.395725313"
Sep 30 13:38:26 crc kubenswrapper[4783]: I0930 13:38:26.484152 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kqm5j" event={"ID":"94e799b5-88f4-4957-99b6-112c0dc06105","Type":"ContainerStarted","Data":"db3e3933cd451c628107aa24a87d223ab8e26a0ca669afb2e476ca85c9f8efc9"}
Sep 30 13:38:28 crc kubenswrapper[4783]: I0930 13:38:28.503340 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6679" event={"ID":"a8918f0d-0293-43a1-923a-6e3c4a9ceb81","Type":"ContainerStarted","Data":"fa0fb0c53753a3f091c07baafb44377e3a58f61e3528a26f3554f4d61bfd0feb"}
Sep 30 13:38:28 crc kubenswrapper[4783]: I0930 13:38:28.530992 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kqm5j" podStartSLOduration=6.442091354 podStartE2EDuration="55.530970387s" podCreationTimestamp="2025-09-30 13:37:33 +0000 UTC" firstStartedPulling="2025-09-30 13:37:35.971439713 +0000 UTC m=+155.902906020" lastFinishedPulling="2025-09-30 13:38:25.060318736 +0000 UTC m=+204.991785053" observedRunningTime="2025-09-30 13:38:26.514971665 +0000 UTC m=+206.446438032" watchObservedRunningTime="2025-09-30 13:38:28.530970387 +0000 UTC m=+208.462436704"
Sep 30 13:38:28 crc kubenswrapper[4783]: I0930 13:38:28.533677 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-l6679" podStartSLOduration=4.556756239 podStartE2EDuration="55.533660371s" podCreationTimestamp="2025-09-30 13:37:33 +0000 UTC" firstStartedPulling="2025-09-30 13:37:35.95463813 +0000 UTC m=+155.886104437" lastFinishedPulling="2025-09-30 13:38:26.931542232 +0000 UTC m=+206.863008569" observedRunningTime="2025-09-30 13:38:28.531919647 +0000 UTC m=+208.463385974" watchObservedRunningTime="2025-09-30 13:38:28.533660371 +0000 UTC m=+208.465126718"
Sep 30 13:38:33 crc kubenswrapper[4783]: I0930 13:38:33.307872 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-z67sw"
Sep 30 13:38:34 crc kubenswrapper[4783]: I0930 13:38:34.339012 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kqm5j"
Sep 30 13:38:34 crc kubenswrapper[4783]: I0930 13:38:34.339114 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kqm5j"
Sep 30 13:38:34 crc kubenswrapper[4783]: I0930 13:38:34.345077 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:38:34 crc kubenswrapper[4783]: I0930 13:38:34.345143 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:38:34 crc kubenswrapper[4783]: I0930 13:38:34.805947 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kqm5j"
Sep 30 13:38:34 crc kubenswrapper[4783]: I0930 13:38:34.807323 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:38:34 crc kubenswrapper[4783]: I0930 13:38:34.855600 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kqm5j"
Sep 30 13:38:34 crc kubenswrapper[4783]: I0930 13:38:34.863991 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:38:35 crc kubenswrapper[4783]: I0930 13:38:35.651517 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l6679"]
Sep 30 13:38:36 crc kubenswrapper[4783]: I0930 13:38:36.551822 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-l6679" podUID="a8918f0d-0293-43a1-923a-6e3c4a9ceb81" containerName="registry-server" containerID="cri-o://fa0fb0c53753a3f091c07baafb44377e3a58f61e3528a26f3554f4d61bfd0feb" gracePeriod=2
Sep 30 13:38:37 crc kubenswrapper[4783]: I0930 13:38:37.674155 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 13:38:37 crc kubenswrapper[4783]: I0930 13:38:37.674275 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 13:38:37 crc kubenswrapper[4783]: I0930 13:38:37.674337 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf"
Sep 30 13:38:37 crc kubenswrapper[4783]: I0930 13:38:37.675094 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 13:38:37 crc kubenswrapper[4783]: I0930 13:38:37.675282 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055" gracePeriod=600
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.400865 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.500994 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzcl8\" (UniqueName: \"kubernetes.io/projected/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-kube-api-access-gzcl8\") pod \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\" (UID: \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\") "
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.501072 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-catalog-content\") pod \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\" (UID: \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\") "
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.501114 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-utilities\") pod \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\" (UID: \"a8918f0d-0293-43a1-923a-6e3c4a9ceb81\") "
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.502144 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-utilities" (OuterVolumeSpecName: "utilities") pod "a8918f0d-0293-43a1-923a-6e3c4a9ceb81" (UID: "a8918f0d-0293-43a1-923a-6e3c4a9ceb81"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.510283 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-kube-api-access-gzcl8" (OuterVolumeSpecName: "kube-api-access-gzcl8") pod "a8918f0d-0293-43a1-923a-6e3c4a9ceb81" (UID: "a8918f0d-0293-43a1-923a-6e3c4a9ceb81"). InnerVolumeSpecName "kube-api-access-gzcl8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.555991 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8918f0d-0293-43a1-923a-6e3c4a9ceb81" (UID: "a8918f0d-0293-43a1-923a-6e3c4a9ceb81"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.580252 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dskrd" event={"ID":"ed506947-4aea-481f-92e6-be13c8bb206b","Type":"ContainerStarted","Data":"569c1f5e2fa2d763678c01c0f246e07b9228f8bd61db917ba0f0e6ff555b0c77"}
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.582333 4783 generic.go:334] "Generic (PLEG): container finished" podID="a8918f0d-0293-43a1-923a-6e3c4a9ceb81" containerID="fa0fb0c53753a3f091c07baafb44377e3a58f61e3528a26f3554f4d61bfd0feb" exitCode=0
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.582365 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l6679"
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.582382 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6679" event={"ID":"a8918f0d-0293-43a1-923a-6e3c4a9ceb81","Type":"ContainerDied","Data":"fa0fb0c53753a3f091c07baafb44377e3a58f61e3528a26f3554f4d61bfd0feb"}
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.582408 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6679" event={"ID":"a8918f0d-0293-43a1-923a-6e3c4a9ceb81","Type":"ContainerDied","Data":"a9322cc966f7193efd1be99863150ed0511a600e5ceb5e91001fef31582ff6c2"}
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.582425 4783 scope.go:117] "RemoveContainer" containerID="fa0fb0c53753a3f091c07baafb44377e3a58f61e3528a26f3554f4d61bfd0feb"
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.590502 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-flrp7" event={"ID":"01cead24-2173-4d6e-90b3-5a22bec86918","Type":"ContainerStarted","Data":"a8c9d6e96b431ab8a3879d776ec8aaf8bb8237ed2d43301c5d99a78d48ec1620"}
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.594864 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055" exitCode=0
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.594930 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055"}
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.594955 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"a2cfdce231ef87d0ab26fdeeae9d5e4950e3d20b1c7ba73fa68de0b35559b26f"}
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.598671 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84r4j" event={"ID":"594ba4d4-8754-4ed5-8c23-0ce494df36ff","Type":"ContainerStarted","Data":"a5dab1421aa82ab519dfd8e190446cb9dd9cc3440804d6e3221ce27dbf750188"}
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.600623 4783 scope.go:117] "RemoveContainer" containerID="5b6d13e7a687c87bb3c5093b21b6db6f675138d66a9c21f6c78bbdc74087d742"
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.607843 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.607873 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.607885 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzcl8\" (UniqueName: \"kubernetes.io/projected/a8918f0d-0293-43a1-923a-6e3c4a9ceb81-kube-api-access-gzcl8\") on node \"crc\" DevicePath \"\""
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.611665 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vz86z" event={"ID":"8d67aba9-ee2c-4608-98d5-f3e6f248ea66","Type":"ContainerStarted","Data":"27792582afcb5c2d561240d8a0bfeab5db62a9138fb6049348a1ba35771202da"}
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.677779 4783 scope.go:117] "RemoveContainer" containerID="a9011d8d34fbf587c113fc9ace3c0ba69082364defcf5b7529f755ee17905f26"
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.702336 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vz86z" podStartSLOduration=3.549001814 podStartE2EDuration="1m5.702322122s" podCreationTimestamp="2025-09-30 13:37:36 +0000 UTC" firstStartedPulling="2025-09-30 13:37:39.125922403 +0000 UTC m=+159.057388710" lastFinishedPulling="2025-09-30 13:38:41.279242711 +0000 UTC m=+221.210709018" observedRunningTime="2025-09-30 13:38:41.681397807 +0000 UTC m=+221.612864114" watchObservedRunningTime="2025-09-30 13:38:41.702322122 +0000 UTC m=+221.633788429"
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.712167 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l6679"]
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.714571 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-l6679"]
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.717570 4783 scope.go:117] "RemoveContainer" containerID="fa0fb0c53753a3f091c07baafb44377e3a58f61e3528a26f3554f4d61bfd0feb"
Sep 30 13:38:41 crc kubenswrapper[4783]: E0930 13:38:41.717961 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa0fb0c53753a3f091c07baafb44377e3a58f61e3528a26f3554f4d61bfd0feb\": container with ID starting with fa0fb0c53753a3f091c07baafb44377e3a58f61e3528a26f3554f4d61bfd0feb not found: ID does not exist" containerID="fa0fb0c53753a3f091c07baafb44377e3a58f61e3528a26f3554f4d61bfd0feb"
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.717989 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa0fb0c53753a3f091c07baafb44377e3a58f61e3528a26f3554f4d61bfd0feb"} err="failed to get container status \"fa0fb0c53753a3f091c07baafb44377e3a58f61e3528a26f3554f4d61bfd0feb\": rpc error: code = NotFound desc = could not find container \"fa0fb0c53753a3f091c07baafb44377e3a58f61e3528a26f3554f4d61bfd0feb\": container with ID starting with fa0fb0c53753a3f091c07baafb44377e3a58f61e3528a26f3554f4d61bfd0feb not found: ID does not exist"
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.718014 4783 scope.go:117] "RemoveContainer" containerID="5b6d13e7a687c87bb3c5093b21b6db6f675138d66a9c21f6c78bbdc74087d742"
Sep 30 13:38:41 crc kubenswrapper[4783]: E0930 13:38:41.718309 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b6d13e7a687c87bb3c5093b21b6db6f675138d66a9c21f6c78bbdc74087d742\": container with ID starting with 5b6d13e7a687c87bb3c5093b21b6db6f675138d66a9c21f6c78bbdc74087d742 not found: ID does not exist" containerID="5b6d13e7a687c87bb3c5093b21b6db6f675138d66a9c21f6c78bbdc74087d742"
Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.718329 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b6d13e7a687c87bb3c5093b21b6db6f675138d66a9c21f6c78bbdc74087d742"} err="failed to get container status
\"5b6d13e7a687c87bb3c5093b21b6db6f675138d66a9c21f6c78bbdc74087d742\": rpc error: code = NotFound desc = could not find container \"5b6d13e7a687c87bb3c5093b21b6db6f675138d66a9c21f6c78bbdc74087d742\": container with ID starting with 5b6d13e7a687c87bb3c5093b21b6db6f675138d66a9c21f6c78bbdc74087d742 not found: ID does not exist" Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.718342 4783 scope.go:117] "RemoveContainer" containerID="a9011d8d34fbf587c113fc9ace3c0ba69082364defcf5b7529f755ee17905f26" Sep 30 13:38:41 crc kubenswrapper[4783]: E0930 13:38:41.718670 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9011d8d34fbf587c113fc9ace3c0ba69082364defcf5b7529f755ee17905f26\": container with ID starting with a9011d8d34fbf587c113fc9ace3c0ba69082364defcf5b7529f755ee17905f26 not found: ID does not exist" containerID="a9011d8d34fbf587c113fc9ace3c0ba69082364defcf5b7529f755ee17905f26" Sep 30 13:38:41 crc kubenswrapper[4783]: I0930 13:38:41.718690 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9011d8d34fbf587c113fc9ace3c0ba69082364defcf5b7529f755ee17905f26"} err="failed to get container status \"a9011d8d34fbf587c113fc9ace3c0ba69082364defcf5b7529f755ee17905f26\": rpc error: code = NotFound desc = could not find container \"a9011d8d34fbf587c113fc9ace3c0ba69082364defcf5b7529f755ee17905f26\": container with ID starting with a9011d8d34fbf587c113fc9ace3c0ba69082364defcf5b7529f755ee17905f26 not found: ID does not exist" Sep 30 13:38:42 crc kubenswrapper[4783]: I0930 13:38:42.617868 4783 generic.go:334] "Generic (PLEG): container finished" podID="01cead24-2173-4d6e-90b3-5a22bec86918" containerID="a8c9d6e96b431ab8a3879d776ec8aaf8bb8237ed2d43301c5d99a78d48ec1620" exitCode=0 Sep 30 13:38:42 crc kubenswrapper[4783]: I0930 13:38:42.617963 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-flrp7" event={"ID":"01cead24-2173-4d6e-90b3-5a22bec86918","Type":"ContainerDied","Data":"a8c9d6e96b431ab8a3879d776ec8aaf8bb8237ed2d43301c5d99a78d48ec1620"} Sep 30 13:38:42 crc kubenswrapper[4783]: I0930 13:38:42.623459 4783 generic.go:334] "Generic (PLEG): container finished" podID="594ba4d4-8754-4ed5-8c23-0ce494df36ff" containerID="a5dab1421aa82ab519dfd8e190446cb9dd9cc3440804d6e3221ce27dbf750188" exitCode=0 Sep 30 13:38:42 crc kubenswrapper[4783]: I0930 13:38:42.623525 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84r4j" event={"ID":"594ba4d4-8754-4ed5-8c23-0ce494df36ff","Type":"ContainerDied","Data":"a5dab1421aa82ab519dfd8e190446cb9dd9cc3440804d6e3221ce27dbf750188"} Sep 30 13:38:42 crc kubenswrapper[4783]: I0930 13:38:42.625868 4783 generic.go:334] "Generic (PLEG): container finished" podID="66113e2e-c750-47b9-be53-81e4eddd9202" containerID="0ccc81b322caf799458e465a7a8dfd1467657c9b261630ed8f95e1b3151e2a80" exitCode=0 Sep 30 13:38:42 crc kubenswrapper[4783]: I0930 13:38:42.625924 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g7rj5" event={"ID":"66113e2e-c750-47b9-be53-81e4eddd9202","Type":"ContainerDied","Data":"0ccc81b322caf799458e465a7a8dfd1467657c9b261630ed8f95e1b3151e2a80"} Sep 30 13:38:42 crc kubenswrapper[4783]: I0930 13:38:42.636395 4783 generic.go:334] "Generic (PLEG): container finished" podID="ed506947-4aea-481f-92e6-be13c8bb206b" containerID="569c1f5e2fa2d763678c01c0f246e07b9228f8bd61db917ba0f0e6ff555b0c77" exitCode=0 Sep 30 
13:38:42 crc kubenswrapper[4783]: I0930 13:38:42.636507 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dskrd" event={"ID":"ed506947-4aea-481f-92e6-be13c8bb206b","Type":"ContainerDied","Data":"569c1f5e2fa2d763678c01c0f246e07b9228f8bd61db917ba0f0e6ff555b0c77"} Sep 30 13:38:42 crc kubenswrapper[4783]: I0930 13:38:42.850394 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8918f0d-0293-43a1-923a-6e3c4a9ceb81" path="/var/lib/kubelet/pods/a8918f0d-0293-43a1-923a-6e3c4a9ceb81/volumes" Sep 30 13:38:43 crc kubenswrapper[4783]: I0930 13:38:43.647009 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dskrd" event={"ID":"ed506947-4aea-481f-92e6-be13c8bb206b","Type":"ContainerStarted","Data":"41d2e03292bcbbbb95c93101e623f35752b2c03340f43c76c8e6e6085a7b43fb"} Sep 30 13:38:43 crc kubenswrapper[4783]: I0930 13:38:43.652609 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-flrp7" event={"ID":"01cead24-2173-4d6e-90b3-5a22bec86918","Type":"ContainerStarted","Data":"eae0f99d8f569ead461e0850d43e3003d64dc34e05149d6f667fb699398f566b"} Sep 30 13:38:43 crc kubenswrapper[4783]: I0930 13:38:43.654626 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-42xzs" event={"ID":"fa3e829d-2caa-4665-80ea-4aeab3a5f220","Type":"ContainerStarted","Data":"05c6a7d4fca08bd6c6a4c548a59bc6beee540504561d145706b54c0e206acd26"} Sep 30 13:38:43 crc kubenswrapper[4783]: I0930 13:38:43.656450 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84r4j" event={"ID":"594ba4d4-8754-4ed5-8c23-0ce494df36ff","Type":"ContainerStarted","Data":"1c23ee1fd38238481249e5a31a0663b723806f77eec22b3277cdbf76106c7290"} Sep 30 13:38:43 crc kubenswrapper[4783]: I0930 13:38:43.658538 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g7rj5" event={"ID":"66113e2e-c750-47b9-be53-81e4eddd9202","Type":"ContainerStarted","Data":"4d828f8c06a9f2eee62fdc5979f2fdaffc85ead23b3b9e5341ccfa18b8be9329"} Sep 30 13:38:43 crc kubenswrapper[4783]: I0930 13:38:43.670648 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dskrd" podStartSLOduration=3.456036627 podStartE2EDuration="1m10.67063186s" podCreationTimestamp="2025-09-30 13:37:33 +0000 UTC" firstStartedPulling="2025-09-30 13:37:35.985100177 +0000 UTC m=+155.916566484" lastFinishedPulling="2025-09-30 13:38:43.19969542 +0000 UTC m=+223.131161717" observedRunningTime="2025-09-30 13:38:43.66937506 +0000 UTC m=+223.600841367" watchObservedRunningTime="2025-09-30 13:38:43.67063186 +0000 UTC m=+223.602098167" Sep 30 13:38:43 crc kubenswrapper[4783]: I0930 13:38:43.688478 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-flrp7" podStartSLOduration=2.7536507390000002 podStartE2EDuration="1m8.688461169s" podCreationTimestamp="2025-09-30 13:37:35 +0000 UTC" firstStartedPulling="2025-09-30 13:37:37.085654687 +0000 UTC m=+157.017121024" lastFinishedPulling="2025-09-30 13:38:43.020465127 +0000 UTC m=+222.951931454" observedRunningTime="2025-09-30 13:38:43.687249751 +0000 UTC m=+223.618716068" watchObservedRunningTime="2025-09-30 13:38:43.688461169 +0000 UTC m=+223.619927476" Sep 30 13:38:43 crc kubenswrapper[4783]: I0930 13:38:43.731356 4783 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-marketplace/certified-operators-84r4j" podStartSLOduration=3.54282873 podStartE2EDuration="1m10.731339301s" podCreationTimestamp="2025-09-30 13:37:33 +0000 UTC" firstStartedPulling="2025-09-30 13:37:35.950778448 +0000 UTC m=+155.882244755" lastFinishedPulling="2025-09-30 13:38:43.139289019 +0000 UTC m=+223.070755326" observedRunningTime="2025-09-30 13:38:43.729847155 +0000 UTC m=+223.661313462" watchObservedRunningTime="2025-09-30 13:38:43.731339301 +0000 UTC m=+223.662805608" Sep 30 13:38:44 crc kubenswrapper[4783]: I0930 13:38:44.208784 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-84r4j" Sep 30 13:38:44 crc kubenswrapper[4783]: I0930 13:38:44.209370 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-84r4j" Sep 30 13:38:44 crc kubenswrapper[4783]: I0930 13:38:44.209675 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:38:44 crc kubenswrapper[4783]: I0930 13:38:44.209703 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:38:44 crc kubenswrapper[4783]: I0930 13:38:44.666272 4783 generic.go:334] "Generic (PLEG): container finished" podID="fa3e829d-2caa-4665-80ea-4aeab3a5f220" containerID="05c6a7d4fca08bd6c6a4c548a59bc6beee540504561d145706b54c0e206acd26" exitCode=0 Sep 30 13:38:44 crc kubenswrapper[4783]: I0930 13:38:44.666343 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-42xzs" event={"ID":"fa3e829d-2caa-4665-80ea-4aeab3a5f220","Type":"ContainerDied","Data":"05c6a7d4fca08bd6c6a4c548a59bc6beee540504561d145706b54c0e206acd26"} Sep 30 13:38:44 crc kubenswrapper[4783]: I0930 13:38:44.686623 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g7rj5" podStartSLOduration=3.6704509400000003 podStartE2EDuration="1m9.686604481s" podCreationTimestamp="2025-09-30 13:37:35 +0000 UTC" firstStartedPulling="2025-09-30 13:37:37.077263981 +0000 UTC m=+157.008730328" lastFinishedPulling="2025-09-30 13:38:43.093417562 +0000 UTC m=+223.024883869" observedRunningTime="2025-09-30 13:38:43.754466126 +0000 UTC m=+223.685932453" watchObservedRunningTime="2025-09-30 13:38:44.686604481 +0000 UTC m=+224.618070788" Sep 30 13:38:45 crc kubenswrapper[4783]: I0930 13:38:45.245557 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-dskrd" podUID="ed506947-4aea-481f-92e6-be13c8bb206b" containerName="registry-server" probeResult="failure" output=< Sep 30 13:38:45 crc kubenswrapper[4783]: timeout: failed to connect service ":50051" within 1s Sep 30 13:38:45 crc kubenswrapper[4783]: > Sep 30 13:38:45 crc kubenswrapper[4783]: I0930 13:38:45.247050 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-84r4j" podUID="594ba4d4-8754-4ed5-8c23-0ce494df36ff" containerName="registry-server" probeResult="failure" output=< Sep 30 13:38:45 crc kubenswrapper[4783]: timeout: failed to connect service ":50051" within 1s Sep 30 13:38:45 crc kubenswrapper[4783]: > Sep 30 13:38:45 crc kubenswrapper[4783]: I0930 13:38:45.674506 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-42xzs" 
event={"ID":"fa3e829d-2caa-4665-80ea-4aeab3a5f220","Type":"ContainerStarted","Data":"7ea602bf630db6000d9e357e159307e1a44a089b66c371888ea8cd59391f558a"} Sep 30 13:38:45 crc kubenswrapper[4783]: I0930 13:38:45.693186 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-42xzs" podStartSLOduration=3.755951344 podStartE2EDuration="1m9.693168617s" podCreationTimestamp="2025-09-30 13:37:36 +0000 UTC" firstStartedPulling="2025-09-30 13:37:39.118705985 +0000 UTC m=+159.050172292" lastFinishedPulling="2025-09-30 13:38:45.055923248 +0000 UTC m=+224.987389565" observedRunningTime="2025-09-30 13:38:45.690124051 +0000 UTC m=+225.621590368" watchObservedRunningTime="2025-09-30 13:38:45.693168617 +0000 UTC m=+225.624634924" Sep 30 13:38:45 crc kubenswrapper[4783]: I0930 13:38:45.820659 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:38:45 crc kubenswrapper[4783]: I0930 13:38:45.820742 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:38:45 crc kubenswrapper[4783]: I0930 13:38:45.883166 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:38:46 crc kubenswrapper[4783]: I0930 13:38:46.042910 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:38:46 crc kubenswrapper[4783]: I0930 13:38:46.043183 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:38:46 crc kubenswrapper[4783]: I0930 13:38:46.106133 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:38:46 crc kubenswrapper[4783]: I0930 13:38:46.620697 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:38:46 crc kubenswrapper[4783]: I0930 13:38:46.621000 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:38:47 crc kubenswrapper[4783]: I0930 13:38:47.066489 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:38:47 crc kubenswrapper[4783]: I0930 13:38:47.066770 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:38:47 crc kubenswrapper[4783]: I0930 13:38:47.689667 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vz86z" podUID="8d67aba9-ee2c-4608-98d5-f3e6f248ea66" containerName="registry-server" probeResult="failure" output=< Sep 30 13:38:47 crc kubenswrapper[4783]: timeout: failed to connect service ":50051" within 1s Sep 30 13:38:47 crc kubenswrapper[4783]: > Sep 30 13:38:48 crc kubenswrapper[4783]: I0930 13:38:48.123009 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-42xzs" podUID="fa3e829d-2caa-4665-80ea-4aeab3a5f220" containerName="registry-server" probeResult="failure" output=< Sep 30 13:38:48 crc kubenswrapper[4783]: timeout: failed to connect service ":50051" within 1s Sep 30 13:38:48 crc kubenswrapper[4783]: > Sep 30 13:38:54 crc kubenswrapper[4783]: I0930 
13:38:54.264690 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-84r4j" Sep 30 13:38:54 crc kubenswrapper[4783]: I0930 13:38:54.266427 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:38:54 crc kubenswrapper[4783]: I0930 13:38:54.302438 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-84r4j" Sep 30 13:38:54 crc kubenswrapper[4783]: I0930 13:38:54.339328 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:38:55 crc kubenswrapper[4783]: I0930 13:38:55.249374 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-84r4j"] Sep 30 13:38:55 crc kubenswrapper[4783]: I0930 13:38:55.758883 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-84r4j" podUID="594ba4d4-8754-4ed5-8c23-0ce494df36ff" containerName="registry-server" containerID="cri-o://1c23ee1fd38238481249e5a31a0663b723806f77eec22b3277cdbf76106c7290" gracePeriod=2 Sep 30 13:38:55 crc kubenswrapper[4783]: I0930 13:38:55.887657 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:38:56 crc kubenswrapper[4783]: I0930 13:38:56.101723 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:38:56 crc kubenswrapper[4783]: I0930 13:38:56.686854 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:38:56 crc kubenswrapper[4783]: I0930 13:38:56.740048 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:38:57 crc kubenswrapper[4783]: I0930 13:38:57.125338 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:38:57 crc kubenswrapper[4783]: I0930 13:38:57.168304 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:38:57 crc kubenswrapper[4783]: I0930 13:38:57.784044 4783 generic.go:334] "Generic (PLEG): container finished" podID="594ba4d4-8754-4ed5-8c23-0ce494df36ff" containerID="1c23ee1fd38238481249e5a31a0663b723806f77eec22b3277cdbf76106c7290" exitCode=0 Sep 30 13:38:57 crc kubenswrapper[4783]: I0930 13:38:57.784142 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84r4j" event={"ID":"594ba4d4-8754-4ed5-8c23-0ce494df36ff","Type":"ContainerDied","Data":"1c23ee1fd38238481249e5a31a0663b723806f77eec22b3277cdbf76106c7290"} Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.251129 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-flrp7"] Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.251728 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-flrp7" podUID="01cead24-2173-4d6e-90b3-5a22bec86918" containerName="registry-server" containerID="cri-o://eae0f99d8f569ead461e0850d43e3003d64dc34e05149d6f667fb699398f566b" gracePeriod=2 Sep 30 13:38:58 crc kubenswrapper[4783]: 
I0930 13:38:58.383915 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-84r4j" Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.465387 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/594ba4d4-8754-4ed5-8c23-0ce494df36ff-utilities\") pod \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\" (UID: \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\") " Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.465536 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8b8v\" (UniqueName: \"kubernetes.io/projected/594ba4d4-8754-4ed5-8c23-0ce494df36ff-kube-api-access-v8b8v\") pod \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\" (UID: \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\") " Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.465583 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/594ba4d4-8754-4ed5-8c23-0ce494df36ff-catalog-content\") pod \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\" (UID: \"594ba4d4-8754-4ed5-8c23-0ce494df36ff\") " Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.467169 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/594ba4d4-8754-4ed5-8c23-0ce494df36ff-utilities" (OuterVolumeSpecName: "utilities") pod "594ba4d4-8754-4ed5-8c23-0ce494df36ff" (UID: "594ba4d4-8754-4ed5-8c23-0ce494df36ff"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.471850 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/594ba4d4-8754-4ed5-8c23-0ce494df36ff-kube-api-access-v8b8v" (OuterVolumeSpecName: "kube-api-access-v8b8v") pod "594ba4d4-8754-4ed5-8c23-0ce494df36ff" (UID: "594ba4d4-8754-4ed5-8c23-0ce494df36ff"). InnerVolumeSpecName "kube-api-access-v8b8v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.518633 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/594ba4d4-8754-4ed5-8c23-0ce494df36ff-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "594ba4d4-8754-4ed5-8c23-0ce494df36ff" (UID: "594ba4d4-8754-4ed5-8c23-0ce494df36ff"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.566579 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8b8v\" (UniqueName: \"kubernetes.io/projected/594ba4d4-8754-4ed5-8c23-0ce494df36ff-kube-api-access-v8b8v\") on node \"crc\" DevicePath \"\"" Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.566617 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/594ba4d4-8754-4ed5-8c23-0ce494df36ff-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.566627 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/594ba4d4-8754-4ed5-8c23-0ce494df36ff-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.794356 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-84r4j" Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.795085 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84r4j" event={"ID":"594ba4d4-8754-4ed5-8c23-0ce494df36ff","Type":"ContainerDied","Data":"f54da8ea804e24f6108f9cc44bb51d3b57362a04254a432c0f8d8b0930ce4d33"} Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.795148 4783 scope.go:117] "RemoveContainer" containerID="1c23ee1fd38238481249e5a31a0663b723806f77eec22b3277cdbf76106c7290" Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.800585 4783 generic.go:334] "Generic (PLEG): container finished" podID="01cead24-2173-4d6e-90b3-5a22bec86918" containerID="eae0f99d8f569ead461e0850d43e3003d64dc34e05149d6f667fb699398f566b" exitCode=0 Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.800647 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-flrp7" event={"ID":"01cead24-2173-4d6e-90b3-5a22bec86918","Type":"ContainerDied","Data":"eae0f99d8f569ead461e0850d43e3003d64dc34e05149d6f667fb699398f566b"} Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.820032 4783 scope.go:117] "RemoveContainer" containerID="a5dab1421aa82ab519dfd8e190446cb9dd9cc3440804d6e3221ce27dbf750188" Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.854626 4783 scope.go:117] "RemoveContainer" containerID="614ec72bebd2ce49c964886fb36513087d70ac1268c09567b361f195a0ff9d09" Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.859456 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-84r4j"] Sep 30 13:38:58 crc kubenswrapper[4783]: I0930 13:38:58.859527 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-84r4j"] Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.248530 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-42xzs"] Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.249160 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-42xzs" podUID="fa3e829d-2caa-4665-80ea-4aeab3a5f220" containerName="registry-server" containerID="cri-o://7ea602bf630db6000d9e357e159307e1a44a089b66c371888ea8cd59391f558a" gracePeriod=2 Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.293797 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.377301 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01cead24-2173-4d6e-90b3-5a22bec86918-catalog-content\") pod \"01cead24-2173-4d6e-90b3-5a22bec86918\" (UID: \"01cead24-2173-4d6e-90b3-5a22bec86918\") " Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.378290 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzd5b\" (UniqueName: \"kubernetes.io/projected/01cead24-2173-4d6e-90b3-5a22bec86918-kube-api-access-bzd5b\") pod \"01cead24-2173-4d6e-90b3-5a22bec86918\" (UID: \"01cead24-2173-4d6e-90b3-5a22bec86918\") " Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.378376 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01cead24-2173-4d6e-90b3-5a22bec86918-utilities\") pod \"01cead24-2173-4d6e-90b3-5a22bec86918\" (UID: \"01cead24-2173-4d6e-90b3-5a22bec86918\") " Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.380688 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01cead24-2173-4d6e-90b3-5a22bec86918-utilities" (OuterVolumeSpecName: "utilities") pod "01cead24-2173-4d6e-90b3-5a22bec86918" (UID: "01cead24-2173-4d6e-90b3-5a22bec86918"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.382742 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01cead24-2173-4d6e-90b3-5a22bec86918-kube-api-access-bzd5b" (OuterVolumeSpecName: "kube-api-access-bzd5b") pod "01cead24-2173-4d6e-90b3-5a22bec86918" (UID: "01cead24-2173-4d6e-90b3-5a22bec86918"). InnerVolumeSpecName "kube-api-access-bzd5b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.392208 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01cead24-2173-4d6e-90b3-5a22bec86918-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "01cead24-2173-4d6e-90b3-5a22bec86918" (UID: "01cead24-2173-4d6e-90b3-5a22bec86918"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.480087 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01cead24-2173-4d6e-90b3-5a22bec86918-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.480121 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01cead24-2173-4d6e-90b3-5a22bec86918-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.480132 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzd5b\" (UniqueName: \"kubernetes.io/projected/01cead24-2173-4d6e-90b3-5a22bec86918-kube-api-access-bzd5b\") on node \"crc\" DevicePath \"\"" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.633044 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.681797 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3e829d-2caa-4665-80ea-4aeab3a5f220-utilities\") pod \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\" (UID: \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\") " Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.681955 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3e829d-2caa-4665-80ea-4aeab3a5f220-catalog-content\") pod \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\" (UID: \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\") " Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.682106 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78dth\" (UniqueName: \"kubernetes.io/projected/fa3e829d-2caa-4665-80ea-4aeab3a5f220-kube-api-access-78dth\") pod \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\" (UID: \"fa3e829d-2caa-4665-80ea-4aeab3a5f220\") " Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.684645 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa3e829d-2caa-4665-80ea-4aeab3a5f220-utilities" (OuterVolumeSpecName: "utilities") pod "fa3e829d-2caa-4665-80ea-4aeab3a5f220" (UID: "fa3e829d-2caa-4665-80ea-4aeab3a5f220"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.686455 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa3e829d-2caa-4665-80ea-4aeab3a5f220-kube-api-access-78dth" (OuterVolumeSpecName: "kube-api-access-78dth") pod "fa3e829d-2caa-4665-80ea-4aeab3a5f220" (UID: "fa3e829d-2caa-4665-80ea-4aeab3a5f220"). InnerVolumeSpecName "kube-api-access-78dth". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.759790 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa3e829d-2caa-4665-80ea-4aeab3a5f220-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa3e829d-2caa-4665-80ea-4aeab3a5f220" (UID: "fa3e829d-2caa-4665-80ea-4aeab3a5f220"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.783284 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78dth\" (UniqueName: \"kubernetes.io/projected/fa3e829d-2caa-4665-80ea-4aeab3a5f220-kube-api-access-78dth\") on node \"crc\" DevicePath \"\"" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.783337 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3e829d-2caa-4665-80ea-4aeab3a5f220-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.783353 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3e829d-2caa-4665-80ea-4aeab3a5f220-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.811557 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-flrp7" event={"ID":"01cead24-2173-4d6e-90b3-5a22bec86918","Type":"ContainerDied","Data":"f51ed38968b9c93ea91681572260ae58f8aba7167c775d08b299e71bc3de097c"} Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.811617 4783 scope.go:117] "RemoveContainer" containerID="eae0f99d8f569ead461e0850d43e3003d64dc34e05149d6f667fb699398f566b" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.811737 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-flrp7" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.814824 4783 generic.go:334] "Generic (PLEG): container finished" podID="fa3e829d-2caa-4665-80ea-4aeab3a5f220" containerID="7ea602bf630db6000d9e357e159307e1a44a089b66c371888ea8cd59391f558a" exitCode=0 Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.814949 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-42xzs" event={"ID":"fa3e829d-2caa-4665-80ea-4aeab3a5f220","Type":"ContainerDied","Data":"7ea602bf630db6000d9e357e159307e1a44a089b66c371888ea8cd59391f558a"} Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.814978 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-42xzs" event={"ID":"fa3e829d-2caa-4665-80ea-4aeab3a5f220","Type":"ContainerDied","Data":"b8c75bda2f5f1ed3f88a50bafa2c7792e5bd65f2c5f04dd2668c7d6e1f272154"} Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.815293 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-42xzs" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.835540 4783 scope.go:117] "RemoveContainer" containerID="a8c9d6e96b431ab8a3879d776ec8aaf8bb8237ed2d43301c5d99a78d48ec1620" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.850951 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-flrp7"] Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.856096 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-flrp7"] Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.860961 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-42xzs"] Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.863824 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-42xzs"] Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.876590 4783 scope.go:117] "RemoveContainer" containerID="efa61047671090b006e0d3c4c21db72ee1be7781d87be7dcd53bf3bf84c56e00" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.894723 4783 scope.go:117] "RemoveContainer" containerID="7ea602bf630db6000d9e357e159307e1a44a089b66c371888ea8cd59391f558a" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.909171 4783 scope.go:117] "RemoveContainer" containerID="05c6a7d4fca08bd6c6a4c548a59bc6beee540504561d145706b54c0e206acd26" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.922726 4783 scope.go:117] "RemoveContainer" containerID="72cbcdaf5021db163f7efd45aa415b4acea399534128210df13b5089f19be700" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.938445 4783 scope.go:117] "RemoveContainer" containerID="7ea602bf630db6000d9e357e159307e1a44a089b66c371888ea8cd59391f558a" Sep 30 13:38:59 crc kubenswrapper[4783]: E0930 13:38:59.938858 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ea602bf630db6000d9e357e159307e1a44a089b66c371888ea8cd59391f558a\": container with ID starting with 7ea602bf630db6000d9e357e159307e1a44a089b66c371888ea8cd59391f558a not found: ID does not exist" containerID="7ea602bf630db6000d9e357e159307e1a44a089b66c371888ea8cd59391f558a" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.938895 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ea602bf630db6000d9e357e159307e1a44a089b66c371888ea8cd59391f558a"} err="failed to get container status \"7ea602bf630db6000d9e357e159307e1a44a089b66c371888ea8cd59391f558a\": rpc error: code = NotFound desc = could not find container \"7ea602bf630db6000d9e357e159307e1a44a089b66c371888ea8cd59391f558a\": container with ID starting with 7ea602bf630db6000d9e357e159307e1a44a089b66c371888ea8cd59391f558a not found: ID does not exist" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.938927 4783 scope.go:117] "RemoveContainer" containerID="05c6a7d4fca08bd6c6a4c548a59bc6beee540504561d145706b54c0e206acd26" Sep 30 13:38:59 crc kubenswrapper[4783]: E0930 13:38:59.939289 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05c6a7d4fca08bd6c6a4c548a59bc6beee540504561d145706b54c0e206acd26\": container with ID starting with 05c6a7d4fca08bd6c6a4c548a59bc6beee540504561d145706b54c0e206acd26 not found: ID does not exist" containerID="05c6a7d4fca08bd6c6a4c548a59bc6beee540504561d145706b54c0e206acd26" Sep 30 13:38:59 
crc kubenswrapper[4783]: I0930 13:38:59.939328 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05c6a7d4fca08bd6c6a4c548a59bc6beee540504561d145706b54c0e206acd26"} err="failed to get container status \"05c6a7d4fca08bd6c6a4c548a59bc6beee540504561d145706b54c0e206acd26\": rpc error: code = NotFound desc = could not find container \"05c6a7d4fca08bd6c6a4c548a59bc6beee540504561d145706b54c0e206acd26\": container with ID starting with 05c6a7d4fca08bd6c6a4c548a59bc6beee540504561d145706b54c0e206acd26 not found: ID does not exist" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.939354 4783 scope.go:117] "RemoveContainer" containerID="72cbcdaf5021db163f7efd45aa415b4acea399534128210df13b5089f19be700" Sep 30 13:38:59 crc kubenswrapper[4783]: E0930 13:38:59.939608 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72cbcdaf5021db163f7efd45aa415b4acea399534128210df13b5089f19be700\": container with ID starting with 72cbcdaf5021db163f7efd45aa415b4acea399534128210df13b5089f19be700 not found: ID does not exist" containerID="72cbcdaf5021db163f7efd45aa415b4acea399534128210df13b5089f19be700" Sep 30 13:38:59 crc kubenswrapper[4783]: I0930 13:38:59.939657 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72cbcdaf5021db163f7efd45aa415b4acea399534128210df13b5089f19be700"} err="failed to get container status \"72cbcdaf5021db163f7efd45aa415b4acea399534128210df13b5089f19be700\": rpc error: code = NotFound desc = could not find container \"72cbcdaf5021db163f7efd45aa415b4acea399534128210df13b5089f19be700\": container with ID starting with 72cbcdaf5021db163f7efd45aa415b4acea399534128210df13b5089f19be700 not found: ID does not exist" Sep 30 13:39:00 crc kubenswrapper[4783]: I0930 13:39:00.851323 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01cead24-2173-4d6e-90b3-5a22bec86918" path="/var/lib/kubelet/pods/01cead24-2173-4d6e-90b3-5a22bec86918/volumes" Sep 30 13:39:00 crc kubenswrapper[4783]: I0930 13:39:00.852781 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="594ba4d4-8754-4ed5-8c23-0ce494df36ff" path="/var/lib/kubelet/pods/594ba4d4-8754-4ed5-8c23-0ce494df36ff/volumes" Sep 30 13:39:00 crc kubenswrapper[4783]: I0930 13:39:00.853989 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa3e829d-2caa-4665-80ea-4aeab3a5f220" path="/var/lib/kubelet/pods/fa3e829d-2caa-4665-80ea-4aeab3a5f220/volumes" Sep 30 13:39:13 crc kubenswrapper[4783]: I0930 13:39:13.316085 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7z2r4"] Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.339488 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" podUID="d452858f-ce60-4cf1-83ec-ac72613ca649" containerName="oauth-openshift" containerID="cri-o://0e634fc110d9abaf5e599fce893748cb71a55e363885fd61496f1bc183c1bf12" gracePeriod=15 Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.780253 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.819903 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-5686c9c7dd-57tft"] Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820392 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d452858f-ce60-4cf1-83ec-ac72613ca649" containerName="oauth-openshift" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820407 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d452858f-ce60-4cf1-83ec-ac72613ca649" containerName="oauth-openshift" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820422 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa3e829d-2caa-4665-80ea-4aeab3a5f220" containerName="extract-utilities" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820431 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa3e829d-2caa-4665-80ea-4aeab3a5f220" containerName="extract-utilities" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820442 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01cead24-2173-4d6e-90b3-5a22bec86918" containerName="extract-content" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820452 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="01cead24-2173-4d6e-90b3-5a22bec86918" containerName="extract-content" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820464 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01cead24-2173-4d6e-90b3-5a22bec86918" containerName="registry-server" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820473 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="01cead24-2173-4d6e-90b3-5a22bec86918" containerName="registry-server" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820485 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8918f0d-0293-43a1-923a-6e3c4a9ceb81" containerName="extract-content" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820493 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8918f0d-0293-43a1-923a-6e3c4a9ceb81" containerName="extract-content" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820514 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8918f0d-0293-43a1-923a-6e3c4a9ceb81" containerName="registry-server" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820522 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8918f0d-0293-43a1-923a-6e3c4a9ceb81" containerName="registry-server" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820534 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48" containerName="pruner" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820542 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48" containerName="pruner" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820551 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8918f0d-0293-43a1-923a-6e3c4a9ceb81" containerName="extract-utilities" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820559 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8918f0d-0293-43a1-923a-6e3c4a9ceb81" containerName="extract-utilities" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820570 4783 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="01cead24-2173-4d6e-90b3-5a22bec86918" containerName="extract-utilities" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820579 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="01cead24-2173-4d6e-90b3-5a22bec86918" containerName="extract-utilities" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820591 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa3e829d-2caa-4665-80ea-4aeab3a5f220" containerName="registry-server" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820600 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa3e829d-2caa-4665-80ea-4aeab3a5f220" containerName="registry-server" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820611 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa3e829d-2caa-4665-80ea-4aeab3a5f220" containerName="extract-content" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820619 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa3e829d-2caa-4665-80ea-4aeab3a5f220" containerName="extract-content" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820629 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="594ba4d4-8754-4ed5-8c23-0ce494df36ff" containerName="extract-content" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820637 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="594ba4d4-8754-4ed5-8c23-0ce494df36ff" containerName="extract-content" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820651 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6af2276a-3ae6-4c19-b75c-935d765d3890" containerName="collect-profiles" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820661 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6af2276a-3ae6-4c19-b75c-935d765d3890" containerName="collect-profiles" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820670 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="594ba4d4-8754-4ed5-8c23-0ce494df36ff" containerName="extract-utilities" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820679 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="594ba4d4-8754-4ed5-8c23-0ce494df36ff" containerName="extract-utilities" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820691 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc1ba935-2f99-4188-b2df-d657dc7c28bc" containerName="pruner" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820699 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc1ba935-2f99-4188-b2df-d657dc7c28bc" containerName="pruner" Sep 30 13:39:38 crc kubenswrapper[4783]: E0930 13:39:38.820711 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="594ba4d4-8754-4ed5-8c23-0ce494df36ff" containerName="registry-server" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820719 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="594ba4d4-8754-4ed5-8c23-0ce494df36ff" containerName="registry-server" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820828 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d9bb3ab-0f4e-4a5b-af8e-b9814c3e6e48" containerName="pruner" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820844 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="01cead24-2173-4d6e-90b3-5a22bec86918" containerName="registry-server" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820853 4783 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="594ba4d4-8754-4ed5-8c23-0ce494df36ff" containerName="registry-server" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820865 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa3e829d-2caa-4665-80ea-4aeab3a5f220" containerName="registry-server" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820881 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8918f0d-0293-43a1-923a-6e3c4a9ceb81" containerName="registry-server" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820891 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d452858f-ce60-4cf1-83ec-ac72613ca649" containerName="oauth-openshift" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820902 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc1ba935-2f99-4188-b2df-d657dc7c28bc" containerName="pruner" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.820913 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6af2276a-3ae6-4c19-b75c-935d765d3890" containerName="collect-profiles" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.821364 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.838491 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5686c9c7dd-57tft"] Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.890711 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-session\") pod \"d452858f-ce60-4cf1-83ec-ac72613ca649\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.890771 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-serving-cert\") pod \"d452858f-ce60-4cf1-83ec-ac72613ca649\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.890802 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-login\") pod \"d452858f-ce60-4cf1-83ec-ac72613ca649\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.890818 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d452858f-ce60-4cf1-83ec-ac72613ca649-audit-dir\") pod \"d452858f-ce60-4cf1-83ec-ac72613ca649\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.890841 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-router-certs\") pod \"d452858f-ce60-4cf1-83ec-ac72613ca649\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.890879 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" 
(UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-audit-policies\") pod \"d452858f-ce60-4cf1-83ec-ac72613ca649\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.890907 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d452858f-ce60-4cf1-83ec-ac72613ca649-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "d452858f-ce60-4cf1-83ec-ac72613ca649" (UID: "d452858f-ce60-4cf1-83ec-ac72613ca649"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.890933 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-trusted-ca-bundle\") pod \"d452858f-ce60-4cf1-83ec-ac72613ca649\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.890973 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96xx4\" (UniqueName: \"kubernetes.io/projected/d452858f-ce60-4cf1-83ec-ac72613ca649-kube-api-access-96xx4\") pod \"d452858f-ce60-4cf1-83ec-ac72613ca649\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.891004 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-service-ca\") pod \"d452858f-ce60-4cf1-83ec-ac72613ca649\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.891040 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-cliconfig\") pod \"d452858f-ce60-4cf1-83ec-ac72613ca649\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.891069 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-ocp-branding-template\") pod \"d452858f-ce60-4cf1-83ec-ac72613ca649\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.891099 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-idp-0-file-data\") pod \"d452858f-ce60-4cf1-83ec-ac72613ca649\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.891136 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-provider-selection\") pod \"d452858f-ce60-4cf1-83ec-ac72613ca649\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.891166 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-error\") pod \"d452858f-ce60-4cf1-83ec-ac72613ca649\" (UID: \"d452858f-ce60-4cf1-83ec-ac72613ca649\") " Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.891403 4783 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d452858f-ce60-4cf1-83ec-ac72613ca649-audit-dir\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.891583 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "d452858f-ce60-4cf1-83ec-ac72613ca649" (UID: "d452858f-ce60-4cf1-83ec-ac72613ca649"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.891610 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "d452858f-ce60-4cf1-83ec-ac72613ca649" (UID: "d452858f-ce60-4cf1-83ec-ac72613ca649"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.891655 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "d452858f-ce60-4cf1-83ec-ac72613ca649" (UID: "d452858f-ce60-4cf1-83ec-ac72613ca649"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.892167 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "d452858f-ce60-4cf1-83ec-ac72613ca649" (UID: "d452858f-ce60-4cf1-83ec-ac72613ca649"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.898887 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "d452858f-ce60-4cf1-83ec-ac72613ca649" (UID: "d452858f-ce60-4cf1-83ec-ac72613ca649"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.898904 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d452858f-ce60-4cf1-83ec-ac72613ca649-kube-api-access-96xx4" (OuterVolumeSpecName: "kube-api-access-96xx4") pod "d452858f-ce60-4cf1-83ec-ac72613ca649" (UID: "d452858f-ce60-4cf1-83ec-ac72613ca649"). InnerVolumeSpecName "kube-api-access-96xx4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.899793 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "d452858f-ce60-4cf1-83ec-ac72613ca649" (UID: "d452858f-ce60-4cf1-83ec-ac72613ca649"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.899816 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "d452858f-ce60-4cf1-83ec-ac72613ca649" (UID: "d452858f-ce60-4cf1-83ec-ac72613ca649"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.900097 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "d452858f-ce60-4cf1-83ec-ac72613ca649" (UID: "d452858f-ce60-4cf1-83ec-ac72613ca649"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.900256 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "d452858f-ce60-4cf1-83ec-ac72613ca649" (UID: "d452858f-ce60-4cf1-83ec-ac72613ca649"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.900427 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "d452858f-ce60-4cf1-83ec-ac72613ca649" (UID: "d452858f-ce60-4cf1-83ec-ac72613ca649"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.900571 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "d452858f-ce60-4cf1-83ec-ac72613ca649" (UID: "d452858f-ce60-4cf1-83ec-ac72613ca649"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.900719 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "d452858f-ce60-4cf1-83ec-ac72613ca649" (UID: "d452858f-ce60-4cf1-83ec-ac72613ca649"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.991921 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-service-ca\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.991958 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-router-certs\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.991975 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.991994 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992035 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-session\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992058 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992084 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992104 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992120 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-user-template-error\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992180 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992208 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/88b546a1-8b9c-4e70-87d3-913ef433e0f0-audit-policies\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992269 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-user-template-login\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992290 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/88b546a1-8b9c-4e70-87d3-913ef433e0f0-audit-dir\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992314 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mq8c\" (UniqueName: \"kubernetes.io/projected/88b546a1-8b9c-4e70-87d3-913ef433e0f0-kube-api-access-6mq8c\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992359 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992371 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 
13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992381 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992392 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992401 4783 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992414 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992424 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96xx4\" (UniqueName: \"kubernetes.io/projected/d452858f-ce60-4cf1-83ec-ac72613ca649-kube-api-access-96xx4\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992432 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992441 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992451 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992461 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992472 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:38 crc kubenswrapper[4783]: I0930 13:39:38.992482 4783 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d452858f-ce60-4cf1-83ec-ac72613ca649-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.043599 4783 generic.go:334] "Generic (PLEG): container finished" podID="d452858f-ce60-4cf1-83ec-ac72613ca649" containerID="0e634fc110d9abaf5e599fce893748cb71a55e363885fd61496f1bc183c1bf12" exitCode=0 Sep 30 
13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.043636 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" event={"ID":"d452858f-ce60-4cf1-83ec-ac72613ca649","Type":"ContainerDied","Data":"0e634fc110d9abaf5e599fce893748cb71a55e363885fd61496f1bc183c1bf12"} Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.043663 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" event={"ID":"d452858f-ce60-4cf1-83ec-ac72613ca649","Type":"ContainerDied","Data":"eb73b215693b0f52b4f908ee5e15bb242db1a88df130f285679fd2e9cfb9b566"} Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.043679 4783 scope.go:117] "RemoveContainer" containerID="0e634fc110d9abaf5e599fce893748cb71a55e363885fd61496f1bc183c1bf12" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.043754 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7z2r4" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.062109 4783 scope.go:117] "RemoveContainer" containerID="0e634fc110d9abaf5e599fce893748cb71a55e363885fd61496f1bc183c1bf12" Sep 30 13:39:39 crc kubenswrapper[4783]: E0930 13:39:39.062562 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e634fc110d9abaf5e599fce893748cb71a55e363885fd61496f1bc183c1bf12\": container with ID starting with 0e634fc110d9abaf5e599fce893748cb71a55e363885fd61496f1bc183c1bf12 not found: ID does not exist" containerID="0e634fc110d9abaf5e599fce893748cb71a55e363885fd61496f1bc183c1bf12" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.062608 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e634fc110d9abaf5e599fce893748cb71a55e363885fd61496f1bc183c1bf12"} err="failed to get container status \"0e634fc110d9abaf5e599fce893748cb71a55e363885fd61496f1bc183c1bf12\": rpc error: code = NotFound desc = could not find container \"0e634fc110d9abaf5e599fce893748cb71a55e363885fd61496f1bc183c1bf12\": container with ID starting with 0e634fc110d9abaf5e599fce893748cb71a55e363885fd61496f1bc183c1bf12 not found: ID does not exist" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.072102 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7z2r4"] Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.080876 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7z2r4"] Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.093919 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-session\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.093962 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: 
I0930 13:39:39.093987 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.094011 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-user-template-error\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.094029 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.094058 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.094084 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/88b546a1-8b9c-4e70-87d3-913ef433e0f0-audit-policies\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.094117 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-user-template-login\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.094133 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/88b546a1-8b9c-4e70-87d3-913ef433e0f0-audit-dir\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.094155 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mq8c\" (UniqueName: \"kubernetes.io/projected/88b546a1-8b9c-4e70-87d3-913ef433e0f0-kube-api-access-6mq8c\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.094179 4783 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-service-ca\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.094197 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-router-certs\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.094213 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.094254 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.095130 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.095255 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/88b546a1-8b9c-4e70-87d3-913ef433e0f0-audit-dir\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.095667 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-service-ca\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.095891 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.095902 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/88b546a1-8b9c-4e70-87d3-913ef433e0f0-audit-policies\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.097506 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.097740 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-session\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.097795 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-user-template-login\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.097855 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-router-certs\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.099676 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-user-template-error\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.100824 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.101712 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.102550 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/88b546a1-8b9c-4e70-87d3-913ef433e0f0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.110807 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mq8c\" (UniqueName: \"kubernetes.io/projected/88b546a1-8b9c-4e70-87d3-913ef433e0f0-kube-api-access-6mq8c\") pod \"oauth-openshift-5686c9c7dd-57tft\" (UID: \"88b546a1-8b9c-4e70-87d3-913ef433e0f0\") " pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.147128 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:39 crc kubenswrapper[4783]: I0930 13:39:39.637413 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5686c9c7dd-57tft"] Sep 30 13:39:40 crc kubenswrapper[4783]: I0930 13:39:40.057251 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" event={"ID":"88b546a1-8b9c-4e70-87d3-913ef433e0f0","Type":"ContainerStarted","Data":"1233ce414cc534918e5dfecabfd84eae1dc7786db2f3b4022926b2d0846f64e3"} Sep 30 13:39:40 crc kubenswrapper[4783]: I0930 13:39:40.058764 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:40 crc kubenswrapper[4783]: I0930 13:39:40.058879 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" event={"ID":"88b546a1-8b9c-4e70-87d3-913ef433e0f0","Type":"ContainerStarted","Data":"d46e693eb3a83594b8d2104c8d5827349dac512bbb39c479879a5b11e480edeb"} Sep 30 13:39:40 crc kubenswrapper[4783]: I0930 13:39:40.095956 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" podStartSLOduration=27.095929987 podStartE2EDuration="27.095929987s" podCreationTimestamp="2025-09-30 13:39:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:39:40.087076197 +0000 UTC m=+280.018542574" watchObservedRunningTime="2025-09-30 13:39:40.095929987 +0000 UTC m=+280.027396334" Sep 30 13:39:40 crc kubenswrapper[4783]: I0930 13:39:40.627621 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-5686c9c7dd-57tft" Sep 30 13:39:40 crc kubenswrapper[4783]: I0930 13:39:40.854212 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d452858f-ce60-4cf1-83ec-ac72613ca649" path="/var/lib/kubelet/pods/d452858f-ce60-4cf1-83ec-ac72613ca649/volumes" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.171443 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dskrd"] Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.172281 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dskrd" podUID="ed506947-4aea-481f-92e6-be13c8bb206b" containerName="registry-server" containerID="cri-o://41d2e03292bcbbbb95c93101e623f35752b2c03340f43c76c8e6e6085a7b43fb" gracePeriod=30 Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 
13:39:55.183688 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kqm5j"] Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.184314 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kqm5j" podUID="94e799b5-88f4-4957-99b6-112c0dc06105" containerName="registry-server" containerID="cri-o://db3e3933cd451c628107aa24a87d223ab8e26a0ca669afb2e476ca85c9f8efc9" gracePeriod=30 Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.186762 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4hvcd"] Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.187011 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" podUID="91c12832-2428-4e1c-b9de-18936239646c" containerName="marketplace-operator" containerID="cri-o://55dd7f91d65ebeddfc3df48f01c1192eb92b05a8deccb6cf53cb2f9fd305b626" gracePeriod=30 Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.204149 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g7rj5"] Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.204418 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g7rj5" podUID="66113e2e-c750-47b9-be53-81e4eddd9202" containerName="registry-server" containerID="cri-o://4d828f8c06a9f2eee62fdc5979f2fdaffc85ead23b3b9e5341ccfa18b8be9329" gracePeriod=30 Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.209291 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lrx8q"] Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.223173 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.228663 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vz86z"] Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.231094 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vz86z" podUID="8d67aba9-ee2c-4608-98d5-f3e6f248ea66" containerName="registry-server" containerID="cri-o://27792582afcb5c2d561240d8a0bfeab5db62a9138fb6049348a1ba35771202da" gracePeriod=30 Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.243406 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lrx8q"] Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.332422 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tmcc\" (UniqueName: \"kubernetes.io/projected/ac751948-b749-45af-8006-6dfe52d63607-kube-api-access-8tmcc\") pod \"marketplace-operator-79b997595-lrx8q\" (UID: \"ac751948-b749-45af-8006-6dfe52d63607\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.332520 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ac751948-b749-45af-8006-6dfe52d63607-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lrx8q\" (UID: \"ac751948-b749-45af-8006-6dfe52d63607\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.332550 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ac751948-b749-45af-8006-6dfe52d63607-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lrx8q\" (UID: \"ac751948-b749-45af-8006-6dfe52d63607\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.433590 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tmcc\" (UniqueName: \"kubernetes.io/projected/ac751948-b749-45af-8006-6dfe52d63607-kube-api-access-8tmcc\") pod \"marketplace-operator-79b997595-lrx8q\" (UID: \"ac751948-b749-45af-8006-6dfe52d63607\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.433643 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ac751948-b749-45af-8006-6dfe52d63607-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lrx8q\" (UID: \"ac751948-b749-45af-8006-6dfe52d63607\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.433661 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ac751948-b749-45af-8006-6dfe52d63607-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lrx8q\" (UID: \"ac751948-b749-45af-8006-6dfe52d63607\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.435102 4783 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ac751948-b749-45af-8006-6dfe52d63607-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lrx8q\" (UID: \"ac751948-b749-45af-8006-6dfe52d63607\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.439763 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ac751948-b749-45af-8006-6dfe52d63607-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lrx8q\" (UID: \"ac751948-b749-45af-8006-6dfe52d63607\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.452511 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tmcc\" (UniqueName: \"kubernetes.io/projected/ac751948-b749-45af-8006-6dfe52d63607-kube-api-access-8tmcc\") pod \"marketplace-operator-79b997595-lrx8q\" (UID: \"ac751948-b749-45af-8006-6dfe52d63607\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.603338 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.610157 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dskrd" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.657769 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kqm5j" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.664057 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.670364 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g7rj5" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.678187 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vz86z" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.738341 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed506947-4aea-481f-92e6-be13c8bb206b-utilities\") pod \"ed506947-4aea-481f-92e6-be13c8bb206b\" (UID: \"ed506947-4aea-481f-92e6-be13c8bb206b\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.738626 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94gfz\" (UniqueName: \"kubernetes.io/projected/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-kube-api-access-94gfz\") pod \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\" (UID: \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.738703 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66113e2e-c750-47b9-be53-81e4eddd9202-catalog-content\") pod \"66113e2e-c750-47b9-be53-81e4eddd9202\" (UID: \"66113e2e-c750-47b9-be53-81e4eddd9202\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.738804 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-catalog-content\") pod \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\" (UID: \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.738968 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgb9n\" (UniqueName: \"kubernetes.io/projected/94e799b5-88f4-4957-99b6-112c0dc06105-kube-api-access-sgb9n\") pod \"94e799b5-88f4-4957-99b6-112c0dc06105\" (UID: \"94e799b5-88f4-4957-99b6-112c0dc06105\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.739050 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed506947-4aea-481f-92e6-be13c8bb206b-catalog-content\") pod \"ed506947-4aea-481f-92e6-be13c8bb206b\" (UID: \"ed506947-4aea-481f-92e6-be13c8bb206b\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.739142 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94e799b5-88f4-4957-99b6-112c0dc06105-catalog-content\") pod \"94e799b5-88f4-4957-99b6-112c0dc06105\" (UID: \"94e799b5-88f4-4957-99b6-112c0dc06105\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.739208 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/91c12832-2428-4e1c-b9de-18936239646c-marketplace-operator-metrics\") pod \"91c12832-2428-4e1c-b9de-18936239646c\" (UID: \"91c12832-2428-4e1c-b9de-18936239646c\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.739292 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd6r5\" (UniqueName: \"kubernetes.io/projected/ed506947-4aea-481f-92e6-be13c8bb206b-kube-api-access-sd6r5\") pod \"ed506947-4aea-481f-92e6-be13c8bb206b\" (UID: \"ed506947-4aea-481f-92e6-be13c8bb206b\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.739394 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed506947-4aea-481f-92e6-be13c8bb206b-utilities" 
(OuterVolumeSpecName: "utilities") pod "ed506947-4aea-481f-92e6-be13c8bb206b" (UID: "ed506947-4aea-481f-92e6-be13c8bb206b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.739657 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed506947-4aea-481f-92e6-be13c8bb206b-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.743472 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94e799b5-88f4-4957-99b6-112c0dc06105-kube-api-access-sgb9n" (OuterVolumeSpecName: "kube-api-access-sgb9n") pod "94e799b5-88f4-4957-99b6-112c0dc06105" (UID: "94e799b5-88f4-4957-99b6-112c0dc06105"). InnerVolumeSpecName "kube-api-access-sgb9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.746506 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed506947-4aea-481f-92e6-be13c8bb206b-kube-api-access-sd6r5" (OuterVolumeSpecName: "kube-api-access-sd6r5") pod "ed506947-4aea-481f-92e6-be13c8bb206b" (UID: "ed506947-4aea-481f-92e6-be13c8bb206b"). InnerVolumeSpecName "kube-api-access-sd6r5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.757795 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-kube-api-access-94gfz" (OuterVolumeSpecName: "kube-api-access-94gfz") pod "8d67aba9-ee2c-4608-98d5-f3e6f248ea66" (UID: "8d67aba9-ee2c-4608-98d5-f3e6f248ea66"). InnerVolumeSpecName "kube-api-access-94gfz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.760163 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91c12832-2428-4e1c-b9de-18936239646c-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "91c12832-2428-4e1c-b9de-18936239646c" (UID: "91c12832-2428-4e1c-b9de-18936239646c"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.763779 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66113e2e-c750-47b9-be53-81e4eddd9202-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "66113e2e-c750-47b9-be53-81e4eddd9202" (UID: "66113e2e-c750-47b9-be53-81e4eddd9202"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.812857 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94e799b5-88f4-4957-99b6-112c0dc06105-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "94e799b5-88f4-4957-99b6-112c0dc06105" (UID: "94e799b5-88f4-4957-99b6-112c0dc06105"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.825267 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed506947-4aea-481f-92e6-be13c8bb206b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed506947-4aea-481f-92e6-be13c8bb206b" (UID: "ed506947-4aea-481f-92e6-be13c8bb206b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.841087 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cslhd\" (UniqueName: \"kubernetes.io/projected/91c12832-2428-4e1c-b9de-18936239646c-kube-api-access-cslhd\") pod \"91c12832-2428-4e1c-b9de-18936239646c\" (UID: \"91c12832-2428-4e1c-b9de-18936239646c\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.841133 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66113e2e-c750-47b9-be53-81e4eddd9202-utilities\") pod \"66113e2e-c750-47b9-be53-81e4eddd9202\" (UID: \"66113e2e-c750-47b9-be53-81e4eddd9202\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.841159 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/91c12832-2428-4e1c-b9de-18936239646c-marketplace-trusted-ca\") pod \"91c12832-2428-4e1c-b9de-18936239646c\" (UID: \"91c12832-2428-4e1c-b9de-18936239646c\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.841183 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8l9x8\" (UniqueName: \"kubernetes.io/projected/66113e2e-c750-47b9-be53-81e4eddd9202-kube-api-access-8l9x8\") pod \"66113e2e-c750-47b9-be53-81e4eddd9202\" (UID: \"66113e2e-c750-47b9-be53-81e4eddd9202\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.841204 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94e799b5-88f4-4957-99b6-112c0dc06105-utilities\") pod \"94e799b5-88f4-4957-99b6-112c0dc06105\" (UID: \"94e799b5-88f4-4957-99b6-112c0dc06105\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.841232 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-utilities\") pod \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\" (UID: \"8d67aba9-ee2c-4608-98d5-f3e6f248ea66\") " Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.841364 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94gfz\" (UniqueName: \"kubernetes.io/projected/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-kube-api-access-94gfz\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.841378 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66113e2e-c750-47b9-be53-81e4eddd9202-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.841387 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgb9n\" (UniqueName: \"kubernetes.io/projected/94e799b5-88f4-4957-99b6-112c0dc06105-kube-api-access-sgb9n\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.841394 4783 
reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94e799b5-88f4-4957-99b6-112c0dc06105-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.841402 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed506947-4aea-481f-92e6-be13c8bb206b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.841410 4783 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/91c12832-2428-4e1c-b9de-18936239646c-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.841418 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd6r5\" (UniqueName: \"kubernetes.io/projected/ed506947-4aea-481f-92e6-be13c8bb206b-kube-api-access-sd6r5\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.842107 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-utilities" (OuterVolumeSpecName: "utilities") pod "8d67aba9-ee2c-4608-98d5-f3e6f248ea66" (UID: "8d67aba9-ee2c-4608-98d5-f3e6f248ea66"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.842341 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91c12832-2428-4e1c-b9de-18936239646c-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "91c12832-2428-4e1c-b9de-18936239646c" (UID: "91c12832-2428-4e1c-b9de-18936239646c"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.843108 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94e799b5-88f4-4957-99b6-112c0dc06105-utilities" (OuterVolumeSpecName: "utilities") pod "94e799b5-88f4-4957-99b6-112c0dc06105" (UID: "94e799b5-88f4-4957-99b6-112c0dc06105"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.843128 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66113e2e-c750-47b9-be53-81e4eddd9202-utilities" (OuterVolumeSpecName: "utilities") pod "66113e2e-c750-47b9-be53-81e4eddd9202" (UID: "66113e2e-c750-47b9-be53-81e4eddd9202"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.843942 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91c12832-2428-4e1c-b9de-18936239646c-kube-api-access-cslhd" (OuterVolumeSpecName: "kube-api-access-cslhd") pod "91c12832-2428-4e1c-b9de-18936239646c" (UID: "91c12832-2428-4e1c-b9de-18936239646c"). InnerVolumeSpecName "kube-api-access-cslhd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.844432 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66113e2e-c750-47b9-be53-81e4eddd9202-kube-api-access-8l9x8" (OuterVolumeSpecName: "kube-api-access-8l9x8") pod "66113e2e-c750-47b9-be53-81e4eddd9202" (UID: "66113e2e-c750-47b9-be53-81e4eddd9202"). InnerVolumeSpecName "kube-api-access-8l9x8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.846012 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lrx8q"] Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.876080 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8d67aba9-ee2c-4608-98d5-f3e6f248ea66" (UID: "8d67aba9-ee2c-4608-98d5-f3e6f248ea66"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.942446 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.942483 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cslhd\" (UniqueName: \"kubernetes.io/projected/91c12832-2428-4e1c-b9de-18936239646c-kube-api-access-cslhd\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.942498 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66113e2e-c750-47b9-be53-81e4eddd9202-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.942508 4783 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/91c12832-2428-4e1c-b9de-18936239646c-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.942519 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8l9x8\" (UniqueName: \"kubernetes.io/projected/66113e2e-c750-47b9-be53-81e4eddd9202-kube-api-access-8l9x8\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.942530 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94e799b5-88f4-4957-99b6-112c0dc06105-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:55 crc kubenswrapper[4783]: I0930 13:39:55.942538 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d67aba9-ee2c-4608-98d5-f3e6f248ea66-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.151438 4783 generic.go:334] "Generic (PLEG): container finished" podID="8d67aba9-ee2c-4608-98d5-f3e6f248ea66" containerID="27792582afcb5c2d561240d8a0bfeab5db62a9138fb6049348a1ba35771202da" exitCode=0 Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.151509 4783 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.151498 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vz86z" event={"ID":"8d67aba9-ee2c-4608-98d5-f3e6f248ea66","Type":"ContainerDied","Data":"27792582afcb5c2d561240d8a0bfeab5db62a9138fb6049348a1ba35771202da"}
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.151581 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vz86z" event={"ID":"8d67aba9-ee2c-4608-98d5-f3e6f248ea66","Type":"ContainerDied","Data":"eab7872d9e5171db9e5d9683e19495063f325e800ef8c1f5f28431066f4e6e11"}
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.151613 4783 scope.go:117] "RemoveContainer" containerID="27792582afcb5c2d561240d8a0bfeab5db62a9138fb6049348a1ba35771202da"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.153212 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" event={"ID":"ac751948-b749-45af-8006-6dfe52d63607","Type":"ContainerStarted","Data":"5f3bab65b6a90932fb13dd7fe2b0c8e2818f124b9b744beb00a4fda4922bddf8"}
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.153260 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" event={"ID":"ac751948-b749-45af-8006-6dfe52d63607","Type":"ContainerStarted","Data":"3ecfd331cfc3849f0cf2bb1f52d0597537e38e74fc5885383bd9b480b25c97e5"}
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.153418 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.154622 4783 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-lrx8q container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" start-of-body=
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.154674 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" podUID="ac751948-b749-45af-8006-6dfe52d63607" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.155295 4783 generic.go:334] "Generic (PLEG): container finished" podID="66113e2e-c750-47b9-be53-81e4eddd9202" containerID="4d828f8c06a9f2eee62fdc5979f2fdaffc85ead23b3b9e5341ccfa18b8be9329" exitCode=0
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.155356 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g7rj5"
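The "connection refused" readiness failure immediately after ContainerStarted is the normal race between the container process launching and its HTTP server binding the port; the kubelet simply retries until the probe passes (it reports "ready" about a second later, below). A hedged sketch of one such HTTP readiness check; the URL and timeout here are illustrative values taken from the log, not a fixed API.

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs a single readiness check, mirroring a GET against /healthz.
func probeOnce(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "dial tcp ...:8080: connect: connection refused"
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	// Until the server inside the container binds :8080, this fails and is retried.
	if err := probeOnce("http://10.217.0.55:8080/healthz"); err != nil {
		fmt.Println("Probe failed:", err)
	}
}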
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.155364 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g7rj5" event={"ID":"66113e2e-c750-47b9-be53-81e4eddd9202","Type":"ContainerDied","Data":"4d828f8c06a9f2eee62fdc5979f2fdaffc85ead23b3b9e5341ccfa18b8be9329"}
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.155467 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g7rj5" event={"ID":"66113e2e-c750-47b9-be53-81e4eddd9202","Type":"ContainerDied","Data":"a89d89c62948e3e1db1080e82c1ff44b9b5c8684cfd4a38c8b85a7941bff7286"}
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.157498 4783 generic.go:334] "Generic (PLEG): container finished" podID="94e799b5-88f4-4957-99b6-112c0dc06105" containerID="db3e3933cd451c628107aa24a87d223ab8e26a0ca669afb2e476ca85c9f8efc9" exitCode=0
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.157549 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kqm5j"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.157581 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kqm5j" event={"ID":"94e799b5-88f4-4957-99b6-112c0dc06105","Type":"ContainerDied","Data":"db3e3933cd451c628107aa24a87d223ab8e26a0ca669afb2e476ca85c9f8efc9"}
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.157618 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kqm5j" event={"ID":"94e799b5-88f4-4957-99b6-112c0dc06105","Type":"ContainerDied","Data":"e82673f8b0a8eedb32eb081dc4b38854af051feb5ef448c662d1517cfe38ae9e"}
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.159455 4783 generic.go:334] "Generic (PLEG): container finished" podID="91c12832-2428-4e1c-b9de-18936239646c" containerID="55dd7f91d65ebeddfc3df48f01c1192eb92b05a8deccb6cf53cb2f9fd305b626" exitCode=0
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.159499 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" event={"ID":"91c12832-2428-4e1c-b9de-18936239646c","Type":"ContainerDied","Data":"55dd7f91d65ebeddfc3df48f01c1192eb92b05a8deccb6cf53cb2f9fd305b626"}
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.159517 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd" event={"ID":"91c12832-2428-4e1c-b9de-18936239646c","Type":"ContainerDied","Data":"e70391c3e887db36220243fa83636c76aeddf103fb41e90a0b94dcecf342e889"}
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.159566 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-4hvcd"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.162047 4783 generic.go:334] "Generic (PLEG): container finished" podID="ed506947-4aea-481f-92e6-be13c8bb206b" containerID="41d2e03292bcbbbb95c93101e623f35752b2c03340f43c76c8e6e6085a7b43fb" exitCode=0
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.162093 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dskrd" event={"ID":"ed506947-4aea-481f-92e6-be13c8bb206b","Type":"ContainerDied","Data":"41d2e03292bcbbbb95c93101e623f35752b2c03340f43c76c8e6e6085a7b43fb"}
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.162126 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dskrd" event={"ID":"ed506947-4aea-481f-92e6-be13c8bb206b","Type":"ContainerDied","Data":"b1806b1f3ace7df780f7372267f4146e1a00f737a20fe215ed6c38a990bc399e"}
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.162174 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dskrd"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.168188 4783 scope.go:117] "RemoveContainer" containerID="e1b6a6d7645481ad871d4f52911e1d97c4b70c2bffb3329b7c52c38a408ddf81"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.185287 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" podStartSLOduration=1.185263188 podStartE2EDuration="1.185263188s" podCreationTimestamp="2025-09-30 13:39:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:39:56.180057274 +0000 UTC m=+296.111523601" watchObservedRunningTime="2025-09-30 13:39:56.185263188 +0000 UTC m=+296.116729495"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.211855 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vz86z"]
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.216506 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vz86z"]
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.224068 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kqm5j"]
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.224970 4783 scope.go:117] "RemoveContainer" containerID="69be560c81ec557f1e46a863aaa1ecbbe05895de03e2eca9e74c1e51a88e4f85"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.227580 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kqm5j"]
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.238233 4783 scope.go:117] "RemoveContainer" containerID="27792582afcb5c2d561240d8a0bfeab5db62a9138fb6049348a1ba35771202da"
Sep 30 13:39:56 crc kubenswrapper[4783]: E0930 13:39:56.238657 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27792582afcb5c2d561240d8a0bfeab5db62a9138fb6049348a1ba35771202da\": container with ID starting with 27792582afcb5c2d561240d8a0bfeab5db62a9138fb6049348a1ba35771202da not found: ID does not exist" containerID="27792582afcb5c2d561240d8a0bfeab5db62a9138fb6049348a1ba35771202da"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.238707 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27792582afcb5c2d561240d8a0bfeab5db62a9138fb6049348a1ba35771202da"} err="failed to get container status \"27792582afcb5c2d561240d8a0bfeab5db62a9138fb6049348a1ba35771202da\": rpc error: code = NotFound desc = could not find container \"27792582afcb5c2d561240d8a0bfeab5db62a9138fb6049348a1ba35771202da\": container with ID starting with 27792582afcb5c2d561240d8a0bfeab5db62a9138fb6049348a1ba35771202da not found: ID does not exist"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.238739 4783 scope.go:117] "RemoveContainer" containerID="e1b6a6d7645481ad871d4f52911e1d97c4b70c2bffb3329b7c52c38a408ddf81"
Sep 30 13:39:56 crc kubenswrapper[4783]: E0930 13:39:56.239088 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1b6a6d7645481ad871d4f52911e1d97c4b70c2bffb3329b7c52c38a408ddf81\": container with ID starting with e1b6a6d7645481ad871d4f52911e1d97c4b70c2bffb3329b7c52c38a408ddf81 not found: ID does not exist" containerID="e1b6a6d7645481ad871d4f52911e1d97c4b70c2bffb3329b7c52c38a408ddf81"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.239114 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1b6a6d7645481ad871d4f52911e1d97c4b70c2bffb3329b7c52c38a408ddf81"} err="failed to get container status \"e1b6a6d7645481ad871d4f52911e1d97c4b70c2bffb3329b7c52c38a408ddf81\": rpc error: code = NotFound desc = could not find container \"e1b6a6d7645481ad871d4f52911e1d97c4b70c2bffb3329b7c52c38a408ddf81\": container with ID starting with e1b6a6d7645481ad871d4f52911e1d97c4b70c2bffb3329b7c52c38a408ddf81 not found: ID does not exist"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.239129 4783 scope.go:117] "RemoveContainer" containerID="69be560c81ec557f1e46a863aaa1ecbbe05895de03e2eca9e74c1e51a88e4f85"
Sep 30 13:39:56 crc kubenswrapper[4783]: E0930 13:39:56.239380 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69be560c81ec557f1e46a863aaa1ecbbe05895de03e2eca9e74c1e51a88e4f85\": container with ID starting with 69be560c81ec557f1e46a863aaa1ecbbe05895de03e2eca9e74c1e51a88e4f85 not found: ID does not exist" containerID="69be560c81ec557f1e46a863aaa1ecbbe05895de03e2eca9e74c1e51a88e4f85"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.239402 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69be560c81ec557f1e46a863aaa1ecbbe05895de03e2eca9e74c1e51a88e4f85"} err="failed to get container status \"69be560c81ec557f1e46a863aaa1ecbbe05895de03e2eca9e74c1e51a88e4f85\": rpc error: code = NotFound desc = could not find container \"69be560c81ec557f1e46a863aaa1ecbbe05895de03e2eca9e74c1e51a88e4f85\": container with ID starting with 69be560c81ec557f1e46a863aaa1ecbbe05895de03e2eca9e74c1e51a88e4f85 not found: ID does not exist"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.239417 4783 scope.go:117] "RemoveContainer" containerID="4d828f8c06a9f2eee62fdc5979f2fdaffc85ead23b3b9e5341ccfa18b8be9329"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.239977 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g7rj5"]
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.243147 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g7rj5"]
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.249350 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dskrd"]
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.252161 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dskrd"]
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.259928 4783 scope.go:117] "RemoveContainer" containerID="0ccc81b322caf799458e465a7a8dfd1467657c9b261630ed8f95e1b3151e2a80"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.275610 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4hvcd"]
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.278949 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4hvcd"]
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.287638 4783 scope.go:117] "RemoveContainer" containerID="07564d10aae1e592921c34276187e06732fec4525ef35652a6f0ebf651ae6c59"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.308063 4783 scope.go:117] "RemoveContainer" containerID="4d828f8c06a9f2eee62fdc5979f2fdaffc85ead23b3b9e5341ccfa18b8be9329"
Sep 30 13:39:56 crc kubenswrapper[4783]: E0930 13:39:56.308428 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d828f8c06a9f2eee62fdc5979f2fdaffc85ead23b3b9e5341ccfa18b8be9329\": container with ID starting with 4d828f8c06a9f2eee62fdc5979f2fdaffc85ead23b3b9e5341ccfa18b8be9329 not found: ID does not exist" containerID="4d828f8c06a9f2eee62fdc5979f2fdaffc85ead23b3b9e5341ccfa18b8be9329"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.308457 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d828f8c06a9f2eee62fdc5979f2fdaffc85ead23b3b9e5341ccfa18b8be9329"} err="failed to get container status \"4d828f8c06a9f2eee62fdc5979f2fdaffc85ead23b3b9e5341ccfa18b8be9329\": rpc error: code = NotFound desc = could not find container \"4d828f8c06a9f2eee62fdc5979f2fdaffc85ead23b3b9e5341ccfa18b8be9329\": container with ID starting with 4d828f8c06a9f2eee62fdc5979f2fdaffc85ead23b3b9e5341ccfa18b8be9329 not found: ID does not exist"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.308482 4783 scope.go:117] "RemoveContainer" containerID="0ccc81b322caf799458e465a7a8dfd1467657c9b261630ed8f95e1b3151e2a80"
Sep 30 13:39:56 crc kubenswrapper[4783]: E0930 13:39:56.309006 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ccc81b322caf799458e465a7a8dfd1467657c9b261630ed8f95e1b3151e2a80\": container with ID starting with 0ccc81b322caf799458e465a7a8dfd1467657c9b261630ed8f95e1b3151e2a80 not found: ID does not exist" containerID="0ccc81b322caf799458e465a7a8dfd1467657c9b261630ed8f95e1b3151e2a80"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.309072 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ccc81b322caf799458e465a7a8dfd1467657c9b261630ed8f95e1b3151e2a80"} err="failed to get container status \"0ccc81b322caf799458e465a7a8dfd1467657c9b261630ed8f95e1b3151e2a80\": rpc error: code = NotFound desc = could not find container \"0ccc81b322caf799458e465a7a8dfd1467657c9b261630ed8f95e1b3151e2a80\": container with ID starting with 0ccc81b322caf799458e465a7a8dfd1467657c9b261630ed8f95e1b3151e2a80 not found: ID does not exist"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.309108 4783 scope.go:117] "RemoveContainer" containerID="07564d10aae1e592921c34276187e06732fec4525ef35652a6f0ebf651ae6c59"
Sep 30 13:39:56 crc kubenswrapper[4783]: E0930 13:39:56.309491 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07564d10aae1e592921c34276187e06732fec4525ef35652a6f0ebf651ae6c59\": container with ID starting with 07564d10aae1e592921c34276187e06732fec4525ef35652a6f0ebf651ae6c59 not found: ID does not exist" containerID="07564d10aae1e592921c34276187e06732fec4525ef35652a6f0ebf651ae6c59"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.309544 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07564d10aae1e592921c34276187e06732fec4525ef35652a6f0ebf651ae6c59"} err="failed to get container status \"07564d10aae1e592921c34276187e06732fec4525ef35652a6f0ebf651ae6c59\": rpc error: code = NotFound desc = could not find container \"07564d10aae1e592921c34276187e06732fec4525ef35652a6f0ebf651ae6c59\": container with ID starting with 07564d10aae1e592921c34276187e06732fec4525ef35652a6f0ebf651ae6c59 not found: ID does not exist"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.309564 4783 scope.go:117] "RemoveContainer" containerID="db3e3933cd451c628107aa24a87d223ab8e26a0ca669afb2e476ca85c9f8efc9"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.322575 4783 scope.go:117] "RemoveContainer" containerID="19c63833de5c7bc2f5ba652e18a8270afbf9be20c1de5c407a32c3e820dba256"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.379637 4783 scope.go:117] "RemoveContainer" containerID="3ef573fbb4062d556850213abf4c2b58f694c6832e4bf7491885898839d17bad"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.393740 4783 scope.go:117] "RemoveContainer" containerID="db3e3933cd451c628107aa24a87d223ab8e26a0ca669afb2e476ca85c9f8efc9"
Sep 30 13:39:56 crc kubenswrapper[4783]: E0930 13:39:56.394092 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db3e3933cd451c628107aa24a87d223ab8e26a0ca669afb2e476ca85c9f8efc9\": container with ID starting with db3e3933cd451c628107aa24a87d223ab8e26a0ca669afb2e476ca85c9f8efc9 not found: ID does not exist" containerID="db3e3933cd451c628107aa24a87d223ab8e26a0ca669afb2e476ca85c9f8efc9"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.394137 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db3e3933cd451c628107aa24a87d223ab8e26a0ca669afb2e476ca85c9f8efc9"} err="failed to get container status \"db3e3933cd451c628107aa24a87d223ab8e26a0ca669afb2e476ca85c9f8efc9\": rpc error: code = NotFound desc = could not find container \"db3e3933cd451c628107aa24a87d223ab8e26a0ca669afb2e476ca85c9f8efc9\": container with ID starting with db3e3933cd451c628107aa24a87d223ab8e26a0ca669afb2e476ca85c9f8efc9 not found: ID does not exist"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.394165 4783 scope.go:117] "RemoveContainer" containerID="19c63833de5c7bc2f5ba652e18a8270afbf9be20c1de5c407a32c3e820dba256"
Sep 30 13:39:56 crc kubenswrapper[4783]: E0930 13:39:56.394556 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19c63833de5c7bc2f5ba652e18a8270afbf9be20c1de5c407a32c3e820dba256\": container with ID starting with 19c63833de5c7bc2f5ba652e18a8270afbf9be20c1de5c407a32c3e820dba256 not found: ID does not exist" containerID="19c63833de5c7bc2f5ba652e18a8270afbf9be20c1de5c407a32c3e820dba256"
exist" containerID="19c63833de5c7bc2f5ba652e18a8270afbf9be20c1de5c407a32c3e820dba256" Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.394607 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19c63833de5c7bc2f5ba652e18a8270afbf9be20c1de5c407a32c3e820dba256"} err="failed to get container status \"19c63833de5c7bc2f5ba652e18a8270afbf9be20c1de5c407a32c3e820dba256\": rpc error: code = NotFound desc = could not find container \"19c63833de5c7bc2f5ba652e18a8270afbf9be20c1de5c407a32c3e820dba256\": container with ID starting with 19c63833de5c7bc2f5ba652e18a8270afbf9be20c1de5c407a32c3e820dba256 not found: ID does not exist" Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.394636 4783 scope.go:117] "RemoveContainer" containerID="3ef573fbb4062d556850213abf4c2b58f694c6832e4bf7491885898839d17bad" Sep 30 13:39:56 crc kubenswrapper[4783]: E0930 13:39:56.395041 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ef573fbb4062d556850213abf4c2b58f694c6832e4bf7491885898839d17bad\": container with ID starting with 3ef573fbb4062d556850213abf4c2b58f694c6832e4bf7491885898839d17bad not found: ID does not exist" containerID="3ef573fbb4062d556850213abf4c2b58f694c6832e4bf7491885898839d17bad" Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.395072 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ef573fbb4062d556850213abf4c2b58f694c6832e4bf7491885898839d17bad"} err="failed to get container status \"3ef573fbb4062d556850213abf4c2b58f694c6832e4bf7491885898839d17bad\": rpc error: code = NotFound desc = could not find container \"3ef573fbb4062d556850213abf4c2b58f694c6832e4bf7491885898839d17bad\": container with ID starting with 3ef573fbb4062d556850213abf4c2b58f694c6832e4bf7491885898839d17bad not found: ID does not exist" Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.395114 4783 scope.go:117] "RemoveContainer" containerID="55dd7f91d65ebeddfc3df48f01c1192eb92b05a8deccb6cf53cb2f9fd305b626" Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.411920 4783 scope.go:117] "RemoveContainer" containerID="55dd7f91d65ebeddfc3df48f01c1192eb92b05a8deccb6cf53cb2f9fd305b626" Sep 30 13:39:56 crc kubenswrapper[4783]: E0930 13:39:56.412378 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55dd7f91d65ebeddfc3df48f01c1192eb92b05a8deccb6cf53cb2f9fd305b626\": container with ID starting with 55dd7f91d65ebeddfc3df48f01c1192eb92b05a8deccb6cf53cb2f9fd305b626 not found: ID does not exist" containerID="55dd7f91d65ebeddfc3df48f01c1192eb92b05a8deccb6cf53cb2f9fd305b626" Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.412411 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55dd7f91d65ebeddfc3df48f01c1192eb92b05a8deccb6cf53cb2f9fd305b626"} err="failed to get container status \"55dd7f91d65ebeddfc3df48f01c1192eb92b05a8deccb6cf53cb2f9fd305b626\": rpc error: code = NotFound desc = could not find container \"55dd7f91d65ebeddfc3df48f01c1192eb92b05a8deccb6cf53cb2f9fd305b626\": container with ID starting with 55dd7f91d65ebeddfc3df48f01c1192eb92b05a8deccb6cf53cb2f9fd305b626 not found: ID does not exist" Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.412435 4783 scope.go:117] "RemoveContainer" containerID="41d2e03292bcbbbb95c93101e623f35752b2c03340f43c76c8e6e6085a7b43fb" Sep 30 13:39:56 crc kubenswrapper[4783]: 
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.438322 4783 scope.go:117] "RemoveContainer" containerID="481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.450769 4783 scope.go:117] "RemoveContainer" containerID="41d2e03292bcbbbb95c93101e623f35752b2c03340f43c76c8e6e6085a7b43fb"
Sep 30 13:39:56 crc kubenswrapper[4783]: E0930 13:39:56.451100 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41d2e03292bcbbbb95c93101e623f35752b2c03340f43c76c8e6e6085a7b43fb\": container with ID starting with 41d2e03292bcbbbb95c93101e623f35752b2c03340f43c76c8e6e6085a7b43fb not found: ID does not exist" containerID="41d2e03292bcbbbb95c93101e623f35752b2c03340f43c76c8e6e6085a7b43fb"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.451128 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41d2e03292bcbbbb95c93101e623f35752b2c03340f43c76c8e6e6085a7b43fb"} err="failed to get container status \"41d2e03292bcbbbb95c93101e623f35752b2c03340f43c76c8e6e6085a7b43fb\": rpc error: code = NotFound desc = could not find container \"41d2e03292bcbbbb95c93101e623f35752b2c03340f43c76c8e6e6085a7b43fb\": container with ID starting with 41d2e03292bcbbbb95c93101e623f35752b2c03340f43c76c8e6e6085a7b43fb not found: ID does not exist"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.451149 4783 scope.go:117] "RemoveContainer" containerID="569c1f5e2fa2d763678c01c0f246e07b9228f8bd61db917ba0f0e6ff555b0c77"
Sep 30 13:39:56 crc kubenswrapper[4783]: E0930 13:39:56.451501 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"569c1f5e2fa2d763678c01c0f246e07b9228f8bd61db917ba0f0e6ff555b0c77\": container with ID starting with 569c1f5e2fa2d763678c01c0f246e07b9228f8bd61db917ba0f0e6ff555b0c77 not found: ID does not exist" containerID="569c1f5e2fa2d763678c01c0f246e07b9228f8bd61db917ba0f0e6ff555b0c77"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.451554 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"569c1f5e2fa2d763678c01c0f246e07b9228f8bd61db917ba0f0e6ff555b0c77"} err="failed to get container status \"569c1f5e2fa2d763678c01c0f246e07b9228f8bd61db917ba0f0e6ff555b0c77\": rpc error: code = NotFound desc = could not find container \"569c1f5e2fa2d763678c01c0f246e07b9228f8bd61db917ba0f0e6ff555b0c77\": container with ID starting with 569c1f5e2fa2d763678c01c0f246e07b9228f8bd61db917ba0f0e6ff555b0c77 not found: ID does not exist"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.451583 4783 scope.go:117] "RemoveContainer" containerID="481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576"
Sep 30 13:39:56 crc kubenswrapper[4783]: E0930 13:39:56.451864 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576\": container with ID starting with 481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576 not found: ID does not exist" containerID="481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576"
Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.451889 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576"} err="failed to get container status \"481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576\": rpc error: code = NotFound desc = could not find container \"481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576\": container with ID starting with 481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576 not found: ID does not exist"
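Every "RemoveContainer" above is followed by a NotFound error from the runtime because the container was already gone by the time the status lookup ran; the kubelet logs the error and moves on, so deletion is effectively idempotent. A sketch of that tolerance pattern; the runtime interface here is a hypothetical stand-in, not the CRI client.

package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("rpc error: code = NotFound")

// removeContainer deletes a container, treating "already gone" as success:
// a missing container is exactly the desired end state.
func removeContainer(remove func(id string) error, id string) error {
	if err := remove(id); err != nil {
		if errors.Is(err, errNotFound) {
			fmt.Printf("container %s already removed; nothing to do\n", id)
			return nil
		}
		return err // any other error is real and should be retried/reported
	}
	return nil
}

func main() {
	// Simulate a runtime whose container vanished before the call.
	gone := func(id string) error {
		return fmt.Errorf("could not find container %q: %w", id, errNotFound)
	}
	_ = removeContainer(gone, "27792582afcb")
}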
returned error" containerID={"Type":"cri-o","ID":"481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576"} err="failed to get container status \"481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576\": rpc error: code = NotFound desc = could not find container \"481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576\": container with ID starting with 481e0a44c1dc0b8b685a9999924deac0570fe6f039a5a5801675e142c4cf0576 not found: ID does not exist" Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.849699 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66113e2e-c750-47b9-be53-81e4eddd9202" path="/var/lib/kubelet/pods/66113e2e-c750-47b9-be53-81e4eddd9202/volumes" Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.850613 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d67aba9-ee2c-4608-98d5-f3e6f248ea66" path="/var/lib/kubelet/pods/8d67aba9-ee2c-4608-98d5-f3e6f248ea66/volumes" Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.851474 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91c12832-2428-4e1c-b9de-18936239646c" path="/var/lib/kubelet/pods/91c12832-2428-4e1c-b9de-18936239646c/volumes" Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.853057 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94e799b5-88f4-4957-99b6-112c0dc06105" path="/var/lib/kubelet/pods/94e799b5-88f4-4957-99b6-112c0dc06105/volumes" Sep 30 13:39:56 crc kubenswrapper[4783]: I0930 13:39:56.853992 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed506947-4aea-481f-92e6-be13c8bb206b" path="/var/lib/kubelet/pods/ed506947-4aea-481f-92e6-be13c8bb206b/volumes" Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.181302 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-lrx8q" Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379203 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-74bz9"] Sep 30 13:39:57 crc kubenswrapper[4783]: E0930 13:39:57.379383 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91c12832-2428-4e1c-b9de-18936239646c" containerName="marketplace-operator" Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379395 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="91c12832-2428-4e1c-b9de-18936239646c" containerName="marketplace-operator" Sep 30 13:39:57 crc kubenswrapper[4783]: E0930 13:39:57.379404 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed506947-4aea-481f-92e6-be13c8bb206b" containerName="extract-content" Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379410 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed506947-4aea-481f-92e6-be13c8bb206b" containerName="extract-content" Sep 30 13:39:57 crc kubenswrapper[4783]: E0930 13:39:57.379419 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66113e2e-c750-47b9-be53-81e4eddd9202" containerName="extract-utilities" Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379425 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="66113e2e-c750-47b9-be53-81e4eddd9202" containerName="extract-utilities" Sep 30 13:39:57 crc kubenswrapper[4783]: E0930 13:39:57.379435 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66113e2e-c750-47b9-be53-81e4eddd9202" containerName="extract-content" Sep 30 13:39:57 crc kubenswrapper[4783]: 
Sep 30 13:39:57 crc kubenswrapper[4783]: E0930 13:39:57.379450 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed506947-4aea-481f-92e6-be13c8bb206b" containerName="extract-utilities"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379455 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed506947-4aea-481f-92e6-be13c8bb206b" containerName="extract-utilities"
Sep 30 13:39:57 crc kubenswrapper[4783]: E0930 13:39:57.379464 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed506947-4aea-481f-92e6-be13c8bb206b" containerName="registry-server"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379469 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed506947-4aea-481f-92e6-be13c8bb206b" containerName="registry-server"
Sep 30 13:39:57 crc kubenswrapper[4783]: E0930 13:39:57.379480 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66113e2e-c750-47b9-be53-81e4eddd9202" containerName="registry-server"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379485 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="66113e2e-c750-47b9-be53-81e4eddd9202" containerName="registry-server"
Sep 30 13:39:57 crc kubenswrapper[4783]: E0930 13:39:57.379492 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d67aba9-ee2c-4608-98d5-f3e6f248ea66" containerName="extract-content"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379497 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d67aba9-ee2c-4608-98d5-f3e6f248ea66" containerName="extract-content"
Sep 30 13:39:57 crc kubenswrapper[4783]: E0930 13:39:57.379505 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94e799b5-88f4-4957-99b6-112c0dc06105" containerName="registry-server"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379510 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="94e799b5-88f4-4957-99b6-112c0dc06105" containerName="registry-server"
Sep 30 13:39:57 crc kubenswrapper[4783]: E0930 13:39:57.379517 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94e799b5-88f4-4957-99b6-112c0dc06105" containerName="extract-content"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379522 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="94e799b5-88f4-4957-99b6-112c0dc06105" containerName="extract-content"
Sep 30 13:39:57 crc kubenswrapper[4783]: E0930 13:39:57.379529 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d67aba9-ee2c-4608-98d5-f3e6f248ea66" containerName="registry-server"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379535 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d67aba9-ee2c-4608-98d5-f3e6f248ea66" containerName="registry-server"
Sep 30 13:39:57 crc kubenswrapper[4783]: E0930 13:39:57.379542 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94e799b5-88f4-4957-99b6-112c0dc06105" containerName="extract-utilities"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379549 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="94e799b5-88f4-4957-99b6-112c0dc06105" containerName="extract-utilities"
Sep 30 13:39:57 crc kubenswrapper[4783]: E0930 13:39:57.379557 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d67aba9-ee2c-4608-98d5-f3e6f248ea66" containerName="extract-utilities"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379562 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d67aba9-ee2c-4608-98d5-f3e6f248ea66" containerName="extract-utilities"
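The cpu_manager/state_mem pairs above (and the memory_manager entries that follow) are resource-manager bookkeeping triggered by pod admission: any CPUSet or memory assignment still recorded for a container of an already-deleted pod is stale and gets swept away. A minimal sketch of that sweep; the map layout and names are illustrative assumptions, not the kubelet's actual state store.

package main

import "fmt"

type key struct{ podUID, container string }

// removeStaleState drops recorded assignments for pods that no longer exist,
// mirroring the RemoveStaleState pass run before admitting a new pod.
func removeStaleState(assignments map[key]string, livePods map[string]bool) {
	for k := range assignments {
		if !livePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
				k.podUID, k.container)
			delete(assignments, k) // safe: deleting during range is allowed in Go
		}
	}
}

func main() {
	assignments := map[key]string{
		{"91c12832", "marketplace-operator"}: "cpuset 0-1",
		{"8d67aba9", "registry-server"}:      "cpuset 2-3",
	}
	removeStaleState(assignments, map[string]bool{}) // both pods already deleted
}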
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379638 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed506947-4aea-481f-92e6-be13c8bb206b" containerName="registry-server"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379650 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="91c12832-2428-4e1c-b9de-18936239646c" containerName="marketplace-operator"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379663 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="66113e2e-c750-47b9-be53-81e4eddd9202" containerName="registry-server"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379671 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="94e799b5-88f4-4957-99b6-112c0dc06105" containerName="registry-server"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.379679 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d67aba9-ee2c-4608-98d5-f3e6f248ea66" containerName="registry-server"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.380277 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-74bz9"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.387181 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.390620 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-74bz9"]
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.464694 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f337bfc3-dbc0-44a9-94ac-26e55012e353-utilities\") pod \"redhat-marketplace-74bz9\" (UID: \"f337bfc3-dbc0-44a9-94ac-26e55012e353\") " pod="openshift-marketplace/redhat-marketplace-74bz9"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.464744 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f337bfc3-dbc0-44a9-94ac-26e55012e353-catalog-content\") pod \"redhat-marketplace-74bz9\" (UID: \"f337bfc3-dbc0-44a9-94ac-26e55012e353\") " pod="openshift-marketplace/redhat-marketplace-74bz9"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.464867 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2vx5\" (UniqueName: \"kubernetes.io/projected/f337bfc3-dbc0-44a9-94ac-26e55012e353-kube-api-access-q2vx5\") pod \"redhat-marketplace-74bz9\" (UID: \"f337bfc3-dbc0-44a9-94ac-26e55012e353\") " pod="openshift-marketplace/redhat-marketplace-74bz9"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.566017 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2vx5\" (UniqueName: \"kubernetes.io/projected/f337bfc3-dbc0-44a9-94ac-26e55012e353-kube-api-access-q2vx5\") pod \"redhat-marketplace-74bz9\" (UID: \"f337bfc3-dbc0-44a9-94ac-26e55012e353\") " pod="openshift-marketplace/redhat-marketplace-74bz9"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.566155 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f337bfc3-dbc0-44a9-94ac-26e55012e353-utilities\") pod \"redhat-marketplace-74bz9\" (UID: \"f337bfc3-dbc0-44a9-94ac-26e55012e353\") " pod="openshift-marketplace/redhat-marketplace-74bz9"
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f337bfc3-dbc0-44a9-94ac-26e55012e353-utilities\") pod \"redhat-marketplace-74bz9\" (UID: \"f337bfc3-dbc0-44a9-94ac-26e55012e353\") " pod="openshift-marketplace/redhat-marketplace-74bz9" Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.566196 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f337bfc3-dbc0-44a9-94ac-26e55012e353-catalog-content\") pod \"redhat-marketplace-74bz9\" (UID: \"f337bfc3-dbc0-44a9-94ac-26e55012e353\") " pod="openshift-marketplace/redhat-marketplace-74bz9" Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.566646 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f337bfc3-dbc0-44a9-94ac-26e55012e353-utilities\") pod \"redhat-marketplace-74bz9\" (UID: \"f337bfc3-dbc0-44a9-94ac-26e55012e353\") " pod="openshift-marketplace/redhat-marketplace-74bz9" Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.566877 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f337bfc3-dbc0-44a9-94ac-26e55012e353-catalog-content\") pod \"redhat-marketplace-74bz9\" (UID: \"f337bfc3-dbc0-44a9-94ac-26e55012e353\") " pod="openshift-marketplace/redhat-marketplace-74bz9" Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.582955 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2vx5\" (UniqueName: \"kubernetes.io/projected/f337bfc3-dbc0-44a9-94ac-26e55012e353-kube-api-access-q2vx5\") pod \"redhat-marketplace-74bz9\" (UID: \"f337bfc3-dbc0-44a9-94ac-26e55012e353\") " pod="openshift-marketplace/redhat-marketplace-74bz9" Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.586954 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xrc9l"] Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.588265 4783 util.go:30] "No sandbox for pod can be found. 
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.590057 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.597034 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xrc9l"]
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.666952 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a782e0d-6419-465e-be5e-c33abcf4aedb-utilities\") pod \"community-operators-xrc9l\" (UID: \"9a782e0d-6419-465e-be5e-c33abcf4aedb\") " pod="openshift-marketplace/community-operators-xrc9l"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.666988 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a782e0d-6419-465e-be5e-c33abcf4aedb-catalog-content\") pod \"community-operators-xrc9l\" (UID: \"9a782e0d-6419-465e-be5e-c33abcf4aedb\") " pod="openshift-marketplace/community-operators-xrc9l"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.667060 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sllc\" (UniqueName: \"kubernetes.io/projected/9a782e0d-6419-465e-be5e-c33abcf4aedb-kube-api-access-2sllc\") pod \"community-operators-xrc9l\" (UID: \"9a782e0d-6419-465e-be5e-c33abcf4aedb\") " pod="openshift-marketplace/community-operators-xrc9l"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.700667 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-74bz9"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.767817 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a782e0d-6419-465e-be5e-c33abcf4aedb-catalog-content\") pod \"community-operators-xrc9l\" (UID: \"9a782e0d-6419-465e-be5e-c33abcf4aedb\") " pod="openshift-marketplace/community-operators-xrc9l"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.767864 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sllc\" (UniqueName: \"kubernetes.io/projected/9a782e0d-6419-465e-be5e-c33abcf4aedb-kube-api-access-2sllc\") pod \"community-operators-xrc9l\" (UID: \"9a782e0d-6419-465e-be5e-c33abcf4aedb\") " pod="openshift-marketplace/community-operators-xrc9l"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.767930 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a782e0d-6419-465e-be5e-c33abcf4aedb-utilities\") pod \"community-operators-xrc9l\" (UID: \"9a782e0d-6419-465e-be5e-c33abcf4aedb\") " pod="openshift-marketplace/community-operators-xrc9l"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.768282 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a782e0d-6419-465e-be5e-c33abcf4aedb-catalog-content\") pod \"community-operators-xrc9l\" (UID: \"9a782e0d-6419-465e-be5e-c33abcf4aedb\") " pod="openshift-marketplace/community-operators-xrc9l"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.768281 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a782e0d-6419-465e-be5e-c33abcf4aedb-utilities\") pod \"community-operators-xrc9l\" (UID: \"9a782e0d-6419-465e-be5e-c33abcf4aedb\") " pod="openshift-marketplace/community-operators-xrc9l"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.789281 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sllc\" (UniqueName: \"kubernetes.io/projected/9a782e0d-6419-465e-be5e-c33abcf4aedb-kube-api-access-2sllc\") pod \"community-operators-xrc9l\" (UID: \"9a782e0d-6419-465e-be5e-c33abcf4aedb\") " pod="openshift-marketplace/community-operators-xrc9l"
Sep 30 13:39:57 crc kubenswrapper[4783]: I0930 13:39:57.956195 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xrc9l"
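For the emptyDir volumes in these MountVolume.SetUp entries, setup essentially amounts to creating a per-pod backing directory under the kubelet's pods tree before any container starts; projected volumes (the kube-api-access-* entries) additionally write token and CA material into theirs. A stdlib-only sketch of the emptyDir case; the root path and directory layout follow the /var/lib/kubelet/pods/<UID>/volumes convention visible elsewhere in this log, but are assumptions for illustration.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// setUpEmptyDir creates the backing directory for an emptyDir volume.
func setUpEmptyDir(root, podUID, volName string) (string, error) {
	dir := filepath.Join(root, podUID, "volumes", "kubernetes.io~empty-dir", volName)
	if err := os.MkdirAll(dir, 0o755); err != nil {
		return "", err
	}
	return dir, nil
}

func main() {
	// Using a temp dir as the root so the sketch runs without privileges.
	dir, err := setUpEmptyDir(os.TempDir(), "9a782e0d-6419-465e-be5e-c33abcf4aedb", "catalog-content")
	if err != nil {
		fmt.Println("MountVolume.SetUp failed:", err)
		return
	}
	fmt.Println("MountVolume.SetUp succeeded:", dir)
}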
Sep 30 13:39:58 crc kubenswrapper[4783]: I0930 13:39:58.099204 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-74bz9"]
Sep 30 13:39:58 crc kubenswrapper[4783]: I0930 13:39:58.180986 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74bz9" event={"ID":"f337bfc3-dbc0-44a9-94ac-26e55012e353","Type":"ContainerStarted","Data":"b4e6dd59f5bb0732347b616873b6763589faf294880d0aa1315494151b705099"}
Sep 30 13:39:58 crc kubenswrapper[4783]: I0930 13:39:58.330435 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xrc9l"]
Sep 30 13:39:58 crc kubenswrapper[4783]: W0930 13:39:58.337255 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a782e0d_6419_465e_be5e_c33abcf4aedb.slice/crio-9d9607aed8bb99e0fd5802935b309d5bf1ea5e9fcc6560f92054d89538427ae9 WatchSource:0}: Error finding container 9d9607aed8bb99e0fd5802935b309d5bf1ea5e9fcc6560f92054d89538427ae9: Status 404 returned error can't find the container with id 9d9607aed8bb99e0fd5802935b309d5bf1ea5e9fcc6560f92054d89538427ae9
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.186723 4783 generic.go:334] "Generic (PLEG): container finished" podID="9a782e0d-6419-465e-be5e-c33abcf4aedb" containerID="7529024e460155f574d9464bc90465e8b6bfd95db188c73bf8227305ee3689eb" exitCode=0
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.186804 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xrc9l" event={"ID":"9a782e0d-6419-465e-be5e-c33abcf4aedb","Type":"ContainerDied","Data":"7529024e460155f574d9464bc90465e8b6bfd95db188c73bf8227305ee3689eb"}
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.187112 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xrc9l" event={"ID":"9a782e0d-6419-465e-be5e-c33abcf4aedb","Type":"ContainerStarted","Data":"9d9607aed8bb99e0fd5802935b309d5bf1ea5e9fcc6560f92054d89538427ae9"}
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.188647 4783 generic.go:334] "Generic (PLEG): container finished" podID="f337bfc3-dbc0-44a9-94ac-26e55012e353" containerID="31cac9018bd0751a5b4b9a9251ac039e5e74ea5a1e7da8375f5e53bd93a3b6b0" exitCode=0
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.188681 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74bz9" event={"ID":"f337bfc3-dbc0-44a9-94ac-26e55012e353","Type":"ContainerDied","Data":"31cac9018bd0751a5b4b9a9251ac039e5e74ea5a1e7da8375f5e53bd93a3b6b0"}
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.783032 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mn9rq"]
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.785555 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mn9rq"
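The "Failed to process watch event ... Status 404" warnings here are a benign race: a cgroup watch fires for a container directory that has already been torn down (or not yet registered) by the time the event is handled. Consumers of such events typically treat not-found as skippable rather than fatal; a sketch of that handling, with the path being a stand-in for the cgroup directory named in the event:

package main

import (
	"fmt"
	"os"
)

// handleWatchEvent inspects the directory a watch event points at,
// tolerating the case where it vanished before we got here.
func handleWatchEvent(path string) error {
	if _, err := os.Stat(path); err != nil {
		if os.IsNotExist(err) {
			fmt.Printf("container dir %s already gone; ignoring event\n", path)
			return nil // benign race with container creation/teardown
		}
		return err // genuine I/O problem; surface it
	}
	fmt.Println("processing", path)
	return nil
}

func main() {
	_ = handleWatchEvent("/sys/fs/cgroup/kubepods.slice/does-not-exist")
}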
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.791153 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25ae6168-8819-424c-a382-02f0c3d7b386-utilities\") pod \"redhat-operators-mn9rq\" (UID: \"25ae6168-8819-424c-a382-02f0c3d7b386\") " pod="openshift-marketplace/redhat-operators-mn9rq"
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.791256 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbsqz\" (UniqueName: \"kubernetes.io/projected/25ae6168-8819-424c-a382-02f0c3d7b386-kube-api-access-gbsqz\") pod \"redhat-operators-mn9rq\" (UID: \"25ae6168-8819-424c-a382-02f0c3d7b386\") " pod="openshift-marketplace/redhat-operators-mn9rq"
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.791287 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25ae6168-8819-424c-a382-02f0c3d7b386-catalog-content\") pod \"redhat-operators-mn9rq\" (UID: \"25ae6168-8819-424c-a382-02f0c3d7b386\") " pod="openshift-marketplace/redhat-operators-mn9rq"
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.791709 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.794832 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mn9rq"]
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.893389 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25ae6168-8819-424c-a382-02f0c3d7b386-utilities\") pod \"redhat-operators-mn9rq\" (UID: \"25ae6168-8819-424c-a382-02f0c3d7b386\") " pod="openshift-marketplace/redhat-operators-mn9rq"
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.893739 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbsqz\" (UniqueName: \"kubernetes.io/projected/25ae6168-8819-424c-a382-02f0c3d7b386-kube-api-access-gbsqz\") pod \"redhat-operators-mn9rq\" (UID: \"25ae6168-8819-424c-a382-02f0c3d7b386\") " pod="openshift-marketplace/redhat-operators-mn9rq"
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.893768 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25ae6168-8819-424c-a382-02f0c3d7b386-catalog-content\") pod \"redhat-operators-mn9rq\" (UID: \"25ae6168-8819-424c-a382-02f0c3d7b386\") " pod="openshift-marketplace/redhat-operators-mn9rq"
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.893832 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25ae6168-8819-424c-a382-02f0c3d7b386-utilities\") pod \"redhat-operators-mn9rq\" (UID: \"25ae6168-8819-424c-a382-02f0c3d7b386\") " pod="openshift-marketplace/redhat-operators-mn9rq"
Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.894163 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25ae6168-8819-424c-a382-02f0c3d7b386-catalog-content\") pod \"redhat-operators-mn9rq\" (UID: \"25ae6168-8819-424c-a382-02f0c3d7b386\") " pod="openshift-marketplace/redhat-operators-mn9rq"
pod="openshift-marketplace/redhat-operators-mn9rq" Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.916745 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbsqz\" (UniqueName: \"kubernetes.io/projected/25ae6168-8819-424c-a382-02f0c3d7b386-kube-api-access-gbsqz\") pod \"redhat-operators-mn9rq\" (UID: \"25ae6168-8819-424c-a382-02f0c3d7b386\") " pod="openshift-marketplace/redhat-operators-mn9rq" Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.979985 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m4cxc"] Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.980881 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.982861 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.991710 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m4cxc"] Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.995150 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhcsq\" (UniqueName: \"kubernetes.io/projected/cfaadd1a-d3fd-4153-8a32-6684619bcb8c-kube-api-access-fhcsq\") pod \"certified-operators-m4cxc\" (UID: \"cfaadd1a-d3fd-4153-8a32-6684619bcb8c\") " pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.995307 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfaadd1a-d3fd-4153-8a32-6684619bcb8c-catalog-content\") pod \"certified-operators-m4cxc\" (UID: \"cfaadd1a-d3fd-4153-8a32-6684619bcb8c\") " pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:39:59 crc kubenswrapper[4783]: I0930 13:39:59.995410 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfaadd1a-d3fd-4153-8a32-6684619bcb8c-utilities\") pod \"certified-operators-m4cxc\" (UID: \"cfaadd1a-d3fd-4153-8a32-6684619bcb8c\") " pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:40:00 crc kubenswrapper[4783]: I0930 13:40:00.096065 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhcsq\" (UniqueName: \"kubernetes.io/projected/cfaadd1a-d3fd-4153-8a32-6684619bcb8c-kube-api-access-fhcsq\") pod \"certified-operators-m4cxc\" (UID: \"cfaadd1a-d3fd-4153-8a32-6684619bcb8c\") " pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:40:00 crc kubenswrapper[4783]: I0930 13:40:00.096122 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfaadd1a-d3fd-4153-8a32-6684619bcb8c-catalog-content\") pod \"certified-operators-m4cxc\" (UID: \"cfaadd1a-d3fd-4153-8a32-6684619bcb8c\") " pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:40:00 crc kubenswrapper[4783]: I0930 13:40:00.096164 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfaadd1a-d3fd-4153-8a32-6684619bcb8c-utilities\") pod \"certified-operators-m4cxc\" (UID: 
\"cfaadd1a-d3fd-4153-8a32-6684619bcb8c\") " pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:40:00 crc kubenswrapper[4783]: I0930 13:40:00.096657 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfaadd1a-d3fd-4153-8a32-6684619bcb8c-utilities\") pod \"certified-operators-m4cxc\" (UID: \"cfaadd1a-d3fd-4153-8a32-6684619bcb8c\") " pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:40:00 crc kubenswrapper[4783]: I0930 13:40:00.096933 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfaadd1a-d3fd-4153-8a32-6684619bcb8c-catalog-content\") pod \"certified-operators-m4cxc\" (UID: \"cfaadd1a-d3fd-4153-8a32-6684619bcb8c\") " pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:40:00 crc kubenswrapper[4783]: I0930 13:40:00.112040 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhcsq\" (UniqueName: \"kubernetes.io/projected/cfaadd1a-d3fd-4153-8a32-6684619bcb8c-kube-api-access-fhcsq\") pod \"certified-operators-m4cxc\" (UID: \"cfaadd1a-d3fd-4153-8a32-6684619bcb8c\") " pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:40:00 crc kubenswrapper[4783]: I0930 13:40:00.117196 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mn9rq" Sep 30 13:40:00 crc kubenswrapper[4783]: I0930 13:40:00.212410 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xrc9l" event={"ID":"9a782e0d-6419-465e-be5e-c33abcf4aedb","Type":"ContainerStarted","Data":"e2f2d77d658448bb488d63acf7f083d48a3e51bd8fa03e78bbfbab8728002a4c"} Sep 30 13:40:00 crc kubenswrapper[4783]: I0930 13:40:00.217081 4783 generic.go:334] "Generic (PLEG): container finished" podID="f337bfc3-dbc0-44a9-94ac-26e55012e353" containerID="a18a43ca47a9c9e6c26a7c82cffc7511f43018ebf981a8506a21bf9bba2ca16a" exitCode=0 Sep 30 13:40:00 crc kubenswrapper[4783]: I0930 13:40:00.217107 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74bz9" event={"ID":"f337bfc3-dbc0-44a9-94ac-26e55012e353","Type":"ContainerDied","Data":"a18a43ca47a9c9e6c26a7c82cffc7511f43018ebf981a8506a21bf9bba2ca16a"} Sep 30 13:40:00 crc kubenswrapper[4783]: I0930 13:40:00.312296 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mn9rq"] Sep 30 13:40:00 crc kubenswrapper[4783]: W0930 13:40:00.327124 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25ae6168_8819_424c_a382_02f0c3d7b386.slice/crio-5923a981d708a2cb5d546b4c220178f32b69e1d33feac0d2637f72fd819bea57 WatchSource:0}: Error finding container 5923a981d708a2cb5d546b4c220178f32b69e1d33feac0d2637f72fd819bea57: Status 404 returned error can't find the container with id 5923a981d708a2cb5d546b4c220178f32b69e1d33feac0d2637f72fd819bea57 Sep 30 13:40:00 crc kubenswrapper[4783]: I0930 13:40:00.376103 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:40:00 crc kubenswrapper[4783]: I0930 13:40:00.775887 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m4cxc"] Sep 30 13:40:01 crc kubenswrapper[4783]: I0930 13:40:01.223560 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74bz9" event={"ID":"f337bfc3-dbc0-44a9-94ac-26e55012e353","Type":"ContainerStarted","Data":"6efebccce157e824118f6b27f4dff6de737b93912adb633c06c4368ec0466aa1"} Sep 30 13:40:01 crc kubenswrapper[4783]: I0930 13:40:01.232269 4783 generic.go:334] "Generic (PLEG): container finished" podID="cfaadd1a-d3fd-4153-8a32-6684619bcb8c" containerID="097949292e2762573b27d07db3dfc6257910c948bec5c09cdffcdb1cd8d88cff" exitCode=0 Sep 30 13:40:01 crc kubenswrapper[4783]: I0930 13:40:01.232390 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4cxc" event={"ID":"cfaadd1a-d3fd-4153-8a32-6684619bcb8c","Type":"ContainerDied","Data":"097949292e2762573b27d07db3dfc6257910c948bec5c09cdffcdb1cd8d88cff"} Sep 30 13:40:01 crc kubenswrapper[4783]: I0930 13:40:01.232430 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4cxc" event={"ID":"cfaadd1a-d3fd-4153-8a32-6684619bcb8c","Type":"ContainerStarted","Data":"6facd5fcd044cb6442c9944fdaa433207e868e18e3fdba9e60d987ea008feeec"} Sep 30 13:40:01 crc kubenswrapper[4783]: I0930 13:40:01.234784 4783 generic.go:334] "Generic (PLEG): container finished" podID="9a782e0d-6419-465e-be5e-c33abcf4aedb" containerID="e2f2d77d658448bb488d63acf7f083d48a3e51bd8fa03e78bbfbab8728002a4c" exitCode=0 Sep 30 13:40:01 crc kubenswrapper[4783]: I0930 13:40:01.234786 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xrc9l" event={"ID":"9a782e0d-6419-465e-be5e-c33abcf4aedb","Type":"ContainerDied","Data":"e2f2d77d658448bb488d63acf7f083d48a3e51bd8fa03e78bbfbab8728002a4c"} Sep 30 13:40:01 crc kubenswrapper[4783]: I0930 13:40:01.237830 4783 generic.go:334] "Generic (PLEG): container finished" podID="25ae6168-8819-424c-a382-02f0c3d7b386" containerID="9f7cbb9f90b7b76b8d50eea6864d6417824bf420930ecf386960528cb10f064e" exitCode=0 Sep 30 13:40:01 crc kubenswrapper[4783]: I0930 13:40:01.237880 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mn9rq" event={"ID":"25ae6168-8819-424c-a382-02f0c3d7b386","Type":"ContainerDied","Data":"9f7cbb9f90b7b76b8d50eea6864d6417824bf420930ecf386960528cb10f064e"} Sep 30 13:40:01 crc kubenswrapper[4783]: I0930 13:40:01.237905 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mn9rq" event={"ID":"25ae6168-8819-424c-a382-02f0c3d7b386","Type":"ContainerStarted","Data":"5923a981d708a2cb5d546b4c220178f32b69e1d33feac0d2637f72fd819bea57"} Sep 30 13:40:01 crc kubenswrapper[4783]: I0930 13:40:01.248132 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-74bz9" podStartSLOduration=2.529458703 podStartE2EDuration="4.24811396s" podCreationTimestamp="2025-09-30 13:39:57 +0000 UTC" firstStartedPulling="2025-09-30 13:39:59.19261459 +0000 UTC m=+299.124080897" lastFinishedPulling="2025-09-30 13:40:00.911269847 +0000 UTC m=+300.842736154" observedRunningTime="2025-09-30 13:40:01.245098194 +0000 UTC m=+301.176564541" watchObservedRunningTime="2025-09-30 13:40:01.24811396 +0000 
UTC m=+301.179580267" Sep 30 13:40:02 crc kubenswrapper[4783]: I0930 13:40:02.248658 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xrc9l" event={"ID":"9a782e0d-6419-465e-be5e-c33abcf4aedb","Type":"ContainerStarted","Data":"1c0db62c82d69b9e1d1a48fb4242d567db3b40a3e1b1dbfe45054ad6342d14b1"} Sep 30 13:40:03 crc kubenswrapper[4783]: I0930 13:40:03.255634 4783 generic.go:334] "Generic (PLEG): container finished" podID="cfaadd1a-d3fd-4153-8a32-6684619bcb8c" containerID="ad9f8f951432900db142f03ac1e1463fca660a035a1b967bff043d60ef55fec0" exitCode=0 Sep 30 13:40:03 crc kubenswrapper[4783]: I0930 13:40:03.255677 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4cxc" event={"ID":"cfaadd1a-d3fd-4153-8a32-6684619bcb8c","Type":"ContainerDied","Data":"ad9f8f951432900db142f03ac1e1463fca660a035a1b967bff043d60ef55fec0"} Sep 30 13:40:03 crc kubenswrapper[4783]: I0930 13:40:03.276732 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xrc9l" podStartSLOduration=3.462795429 podStartE2EDuration="6.276712489s" podCreationTimestamp="2025-09-30 13:39:57 +0000 UTC" firstStartedPulling="2025-09-30 13:39:59.187943932 +0000 UTC m=+299.119410239" lastFinishedPulling="2025-09-30 13:40:02.001860982 +0000 UTC m=+301.933327299" observedRunningTime="2025-09-30 13:40:03.273317941 +0000 UTC m=+303.204784288" watchObservedRunningTime="2025-09-30 13:40:03.276712489 +0000 UTC m=+303.208178806" Sep 30 13:40:04 crc kubenswrapper[4783]: I0930 13:40:04.262291 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mn9rq" event={"ID":"25ae6168-8819-424c-a382-02f0c3d7b386","Type":"ContainerStarted","Data":"d12591c06734399d01e45499801946cd002c136762a7ecac7c764f0f5ab08911"} Sep 30 13:40:05 crc kubenswrapper[4783]: I0930 13:40:05.269010 4783 generic.go:334] "Generic (PLEG): container finished" podID="25ae6168-8819-424c-a382-02f0c3d7b386" containerID="d12591c06734399d01e45499801946cd002c136762a7ecac7c764f0f5ab08911" exitCode=0 Sep 30 13:40:05 crc kubenswrapper[4783]: I0930 13:40:05.269661 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mn9rq" event={"ID":"25ae6168-8819-424c-a382-02f0c3d7b386","Type":"ContainerDied","Data":"d12591c06734399d01e45499801946cd002c136762a7ecac7c764f0f5ab08911"} Sep 30 13:40:05 crc kubenswrapper[4783]: I0930 13:40:05.271844 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4cxc" event={"ID":"cfaadd1a-d3fd-4153-8a32-6684619bcb8c","Type":"ContainerStarted","Data":"28985ad0d7cfe2d1463d12812b235e636dd617629eb19fb1474f5529adae92a4"} Sep 30 13:40:05 crc kubenswrapper[4783]: I0930 13:40:05.310373 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m4cxc" podStartSLOduration=3.148211597 podStartE2EDuration="6.310331798s" podCreationTimestamp="2025-09-30 13:39:59 +0000 UTC" firstStartedPulling="2025-09-30 13:40:01.234806989 +0000 UTC m=+301.166273296" lastFinishedPulling="2025-09-30 13:40:04.39692719 +0000 UTC m=+304.328393497" observedRunningTime="2025-09-30 13:40:05.308597982 +0000 UTC m=+305.240064309" watchObservedRunningTime="2025-09-30 13:40:05.310331798 +0000 UTC m=+305.241798105" Sep 30 13:40:07 crc kubenswrapper[4783]: I0930 13:40:07.284189 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-mn9rq" event={"ID":"25ae6168-8819-424c-a382-02f0c3d7b386","Type":"ContainerStarted","Data":"440bbc8ac09316051df3f8d196a6384d435c34700064872cdc417963c4e1af75"} Sep 30 13:40:07 crc kubenswrapper[4783]: I0930 13:40:07.308765 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mn9rq" podStartSLOduration=3.229572087 podStartE2EDuration="8.308743053s" podCreationTimestamp="2025-09-30 13:39:59 +0000 UTC" firstStartedPulling="2025-09-30 13:40:01.239833158 +0000 UTC m=+301.171299455" lastFinishedPulling="2025-09-30 13:40:06.319004124 +0000 UTC m=+306.250470421" observedRunningTime="2025-09-30 13:40:07.301471623 +0000 UTC m=+307.232937950" watchObservedRunningTime="2025-09-30 13:40:07.308743053 +0000 UTC m=+307.240209360" Sep 30 13:40:07 crc kubenswrapper[4783]: I0930 13:40:07.701792 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-74bz9" Sep 30 13:40:07 crc kubenswrapper[4783]: I0930 13:40:07.701862 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-74bz9" Sep 30 13:40:07 crc kubenswrapper[4783]: I0930 13:40:07.764528 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-74bz9" Sep 30 13:40:07 crc kubenswrapper[4783]: I0930 13:40:07.957210 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xrc9l" Sep 30 13:40:07 crc kubenswrapper[4783]: I0930 13:40:07.957361 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xrc9l" Sep 30 13:40:08 crc kubenswrapper[4783]: I0930 13:40:08.008555 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xrc9l" Sep 30 13:40:08 crc kubenswrapper[4783]: I0930 13:40:08.329581 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xrc9l" Sep 30 13:40:08 crc kubenswrapper[4783]: I0930 13:40:08.333692 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-74bz9" Sep 30 13:40:10 crc kubenswrapper[4783]: I0930 13:40:10.118709 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mn9rq" Sep 30 13:40:10 crc kubenswrapper[4783]: I0930 13:40:10.119090 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mn9rq" Sep 30 13:40:10 crc kubenswrapper[4783]: I0930 13:40:10.377600 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:40:10 crc kubenswrapper[4783]: I0930 13:40:10.377643 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:40:10 crc kubenswrapper[4783]: I0930 13:40:10.421288 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:40:11 crc kubenswrapper[4783]: I0930 13:40:11.165043 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mn9rq" podUID="25ae6168-8819-424c-a382-02f0c3d7b386" containerName="registry-server" 
probeResult="failure" output=< Sep 30 13:40:11 crc kubenswrapper[4783]: timeout: failed to connect service ":50051" within 1s Sep 30 13:40:11 crc kubenswrapper[4783]: > Sep 30 13:40:11 crc kubenswrapper[4783]: I0930 13:40:11.339054 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m4cxc" Sep 30 13:40:20 crc kubenswrapper[4783]: I0930 13:40:20.184405 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mn9rq" Sep 30 13:40:20 crc kubenswrapper[4783]: I0930 13:40:20.254371 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mn9rq" Sep 30 13:41:07 crc kubenswrapper[4783]: I0930 13:41:07.673676 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:41:07 crc kubenswrapper[4783]: I0930 13:41:07.674459 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:41:37 crc kubenswrapper[4783]: I0930 13:41:37.674661 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:41:37 crc kubenswrapper[4783]: I0930 13:41:37.675149 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:42:07 crc kubenswrapper[4783]: I0930 13:42:07.673556 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:42:07 crc kubenswrapper[4783]: I0930 13:42:07.674092 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:42:07 crc kubenswrapper[4783]: I0930 13:42:07.674141 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:42:07 crc kubenswrapper[4783]: I0930 13:42:07.674848 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a2cfdce231ef87d0ab26fdeeae9d5e4950e3d20b1c7ba73fa68de0b35559b26f"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container 
machine-config-daemon failed liveness probe, will be restarted" Sep 30 13:42:07 crc kubenswrapper[4783]: I0930 13:42:07.674897 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://a2cfdce231ef87d0ab26fdeeae9d5e4950e3d20b1c7ba73fa68de0b35559b26f" gracePeriod=600 Sep 30 13:42:08 crc kubenswrapper[4783]: I0930 13:42:08.025430 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="a2cfdce231ef87d0ab26fdeeae9d5e4950e3d20b1c7ba73fa68de0b35559b26f" exitCode=0 Sep 30 13:42:08 crc kubenswrapper[4783]: I0930 13:42:08.025527 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"a2cfdce231ef87d0ab26fdeeae9d5e4950e3d20b1c7ba73fa68de0b35559b26f"} Sep 30 13:42:08 crc kubenswrapper[4783]: I0930 13:42:08.025793 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"124a8ad49c667c4b9b3a7d3c02a461dd34a53bfe6b1a15c17af1b4401c60ef79"} Sep 30 13:42:08 crc kubenswrapper[4783]: I0930 13:42:08.025833 4783 scope.go:117] "RemoveContainer" containerID="dea01439e0c3b34e035a7660a3838fe0399a4f4b03aa7d4a27e92982117e7055" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.590242 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fmgt9"] Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.591478 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.613584 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fmgt9"] Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.679983 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/820912b2-a154-44ec-b01e-edf404abf196-trusted-ca\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.680050 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.680099 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2q8m9\" (UniqueName: \"kubernetes.io/projected/820912b2-a154-44ec-b01e-edf404abf196-kube-api-access-2q8m9\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.680126 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/820912b2-a154-44ec-b01e-edf404abf196-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.680158 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/820912b2-a154-44ec-b01e-edf404abf196-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.680184 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/820912b2-a154-44ec-b01e-edf404abf196-registry-certificates\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.680242 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/820912b2-a154-44ec-b01e-edf404abf196-registry-tls\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.680264 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/820912b2-a154-44ec-b01e-edf404abf196-bound-sa-token\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.701521 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.781155 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/820912b2-a154-44ec-b01e-edf404abf196-trusted-ca\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.781256 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2q8m9\" (UniqueName: \"kubernetes.io/projected/820912b2-a154-44ec-b01e-edf404abf196-kube-api-access-2q8m9\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.781290 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/820912b2-a154-44ec-b01e-edf404abf196-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.781328 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/820912b2-a154-44ec-b01e-edf404abf196-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.781358 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/820912b2-a154-44ec-b01e-edf404abf196-registry-certificates\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.781400 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/820912b2-a154-44ec-b01e-edf404abf196-registry-tls\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.781426 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/820912b2-a154-44ec-b01e-edf404abf196-bound-sa-token\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.782286 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/820912b2-a154-44ec-b01e-edf404abf196-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.783133 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/820912b2-a154-44ec-b01e-edf404abf196-trusted-ca\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.783646 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/820912b2-a154-44ec-b01e-edf404abf196-registry-certificates\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.788533 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/820912b2-a154-44ec-b01e-edf404abf196-registry-tls\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.789818 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/820912b2-a154-44ec-b01e-edf404abf196-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.804433 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2q8m9\" (UniqueName: \"kubernetes.io/projected/820912b2-a154-44ec-b01e-edf404abf196-kube-api-access-2q8m9\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.804737 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/820912b2-a154-44ec-b01e-edf404abf196-bound-sa-token\") pod \"image-registry-66df7c8f76-fmgt9\" (UID: \"820912b2-a154-44ec-b01e-edf404abf196\") " pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:16 crc kubenswrapper[4783]: I0930 13:43:16.908614 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:17 crc kubenswrapper[4783]: I0930 13:43:17.321886 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fmgt9"] Sep 30 13:43:17 crc kubenswrapper[4783]: I0930 13:43:17.496457 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" event={"ID":"820912b2-a154-44ec-b01e-edf404abf196","Type":"ContainerStarted","Data":"546c799c4349d00e6970c6439d6a0276b711da047df4bb81fc3b019553d9680b"} Sep 30 13:43:18 crc kubenswrapper[4783]: I0930 13:43:18.505604 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" event={"ID":"820912b2-a154-44ec-b01e-edf404abf196","Type":"ContainerStarted","Data":"81b6503a3b509d07e89f7d8e9a6bf90a3e63ee5910438293d4daad3c480865d5"} Sep 30 13:43:18 crc kubenswrapper[4783]: I0930 13:43:18.505997 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:18 crc kubenswrapper[4783]: I0930 13:43:18.539505 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" podStartSLOduration=2.539472468 podStartE2EDuration="2.539472468s" podCreationTimestamp="2025-09-30 13:43:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:43:18.529563634 +0000 UTC m=+498.461029991" watchObservedRunningTime="2025-09-30 13:43:18.539472468 +0000 UTC m=+498.470938815" Sep 30 13:43:36 crc kubenswrapper[4783]: I0930 13:43:36.915973 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-fmgt9" Sep 30 13:43:36 crc kubenswrapper[4783]: I0930 13:43:36.988289 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwvfx"] Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.032567 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" podUID="dfcd81db-a627-438a-92b3-f5fcabeff1c4" containerName="registry" containerID="cri-o://4b5343a560bc8591999f9a675f9b075eb0cb04f4009fcbaebe95e254cca9f1fa" gracePeriod=30 Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.688561 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.778436 4783 generic.go:334] "Generic (PLEG): container finished" podID="dfcd81db-a627-438a-92b3-f5fcabeff1c4" containerID="4b5343a560bc8591999f9a675f9b075eb0cb04f4009fcbaebe95e254cca9f1fa" exitCode=0 Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.778477 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" event={"ID":"dfcd81db-a627-438a-92b3-f5fcabeff1c4","Type":"ContainerDied","Data":"4b5343a560bc8591999f9a675f9b075eb0cb04f4009fcbaebe95e254cca9f1fa"} Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.778490 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.778503 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xwvfx" event={"ID":"dfcd81db-a627-438a-92b3-f5fcabeff1c4","Type":"ContainerDied","Data":"cbd59e15182837ebbaf932c7071efdf6dfcee99d6a32f25e0e813f988022b9ba"} Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.778523 4783 scope.go:117] "RemoveContainer" containerID="4b5343a560bc8591999f9a675f9b075eb0cb04f4009fcbaebe95e254cca9f1fa" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.789553 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/dfcd81db-a627-438a-92b3-f5fcabeff1c4-registry-certificates\") pod \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.789611 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/dfcd81db-a627-438a-92b3-f5fcabeff1c4-ca-trust-extracted\") pod \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.789644 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llj28\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-kube-api-access-llj28\") pod \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.789685 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-registry-tls\") pod \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.789829 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.789900 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dfcd81db-a627-438a-92b3-f5fcabeff1c4-trusted-ca\") pod \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.790628 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/dfcd81db-a627-438a-92b3-f5fcabeff1c4-installation-pull-secrets\") pod \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.790767 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-bound-sa-token\") pod \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\" (UID: \"dfcd81db-a627-438a-92b3-f5fcabeff1c4\") " Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.790430 
4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfcd81db-a627-438a-92b3-f5fcabeff1c4-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "dfcd81db-a627-438a-92b3-f5fcabeff1c4" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.791017 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfcd81db-a627-438a-92b3-f5fcabeff1c4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "dfcd81db-a627-438a-92b3-f5fcabeff1c4" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.791520 4783 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/dfcd81db-a627-438a-92b3-f5fcabeff1c4-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.791544 4783 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dfcd81db-a627-438a-92b3-f5fcabeff1c4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.794992 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "dfcd81db-a627-438a-92b3-f5fcabeff1c4" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.797322 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "dfcd81db-a627-438a-92b3-f5fcabeff1c4" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.797519 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-kube-api-access-llj28" (OuterVolumeSpecName: "kube-api-access-llj28") pod "dfcd81db-a627-438a-92b3-f5fcabeff1c4" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4"). InnerVolumeSpecName "kube-api-access-llj28". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.797711 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfcd81db-a627-438a-92b3-f5fcabeff1c4-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "dfcd81db-a627-438a-92b3-f5fcabeff1c4" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.800908 4783 scope.go:117] "RemoveContainer" containerID="4b5343a560bc8591999f9a675f9b075eb0cb04f4009fcbaebe95e254cca9f1fa" Sep 30 13:44:02 crc kubenswrapper[4783]: E0930 13:44:02.801338 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b5343a560bc8591999f9a675f9b075eb0cb04f4009fcbaebe95e254cca9f1fa\": container with ID starting with 4b5343a560bc8591999f9a675f9b075eb0cb04f4009fcbaebe95e254cca9f1fa not found: ID does not exist" containerID="4b5343a560bc8591999f9a675f9b075eb0cb04f4009fcbaebe95e254cca9f1fa" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.801371 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b5343a560bc8591999f9a675f9b075eb0cb04f4009fcbaebe95e254cca9f1fa"} err="failed to get container status \"4b5343a560bc8591999f9a675f9b075eb0cb04f4009fcbaebe95e254cca9f1fa\": rpc error: code = NotFound desc = could not find container \"4b5343a560bc8591999f9a675f9b075eb0cb04f4009fcbaebe95e254cca9f1fa\": container with ID starting with 4b5343a560bc8591999f9a675f9b075eb0cb04f4009fcbaebe95e254cca9f1fa not found: ID does not exist" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.805625 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "dfcd81db-a627-438a-92b3-f5fcabeff1c4" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.806756 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfcd81db-a627-438a-92b3-f5fcabeff1c4-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "dfcd81db-a627-438a-92b3-f5fcabeff1c4" (UID: "dfcd81db-a627-438a-92b3-f5fcabeff1c4"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.893129 4783 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/dfcd81db-a627-438a-92b3-f5fcabeff1c4-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.893188 4783 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.893214 4783 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/dfcd81db-a627-438a-92b3-f5fcabeff1c4-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.893270 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llj28\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-kube-api-access-llj28\") on node \"crc\" DevicePath \"\"" Sep 30 13:44:02 crc kubenswrapper[4783]: I0930 13:44:02.893294 4783 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/dfcd81db-a627-438a-92b3-f5fcabeff1c4-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:44:03 crc kubenswrapper[4783]: I0930 13:44:03.102894 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwvfx"] Sep 30 13:44:03 crc kubenswrapper[4783]: I0930 13:44:03.107486 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwvfx"] Sep 30 13:44:04 crc kubenswrapper[4783]: I0930 13:44:04.851692 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfcd81db-a627-438a-92b3-f5fcabeff1c4" path="/var/lib/kubelet/pods/dfcd81db-a627-438a-92b3-f5fcabeff1c4/volumes" Sep 30 13:44:37 crc kubenswrapper[4783]: I0930 13:44:37.674457 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:44:37 crc kubenswrapper[4783]: I0930 13:44:37.675151 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.170526 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8"] Sep 30 13:45:00 crc kubenswrapper[4783]: E0930 13:45:00.171377 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfcd81db-a627-438a-92b3-f5fcabeff1c4" containerName="registry" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.171396 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfcd81db-a627-438a-92b3-f5fcabeff1c4" containerName="registry" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.171507 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfcd81db-a627-438a-92b3-f5fcabeff1c4" 
containerName="registry" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.172009 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.175568 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8"] Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.177604 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.179408 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.273823 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90c7cdb2-f935-4694-ad68-07ea73c25b70-config-volume\") pod \"collect-profiles-29320665-9bzw8\" (UID: \"90c7cdb2-f935-4694-ad68-07ea73c25b70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.273878 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rg2m\" (UniqueName: \"kubernetes.io/projected/90c7cdb2-f935-4694-ad68-07ea73c25b70-kube-api-access-5rg2m\") pod \"collect-profiles-29320665-9bzw8\" (UID: \"90c7cdb2-f935-4694-ad68-07ea73c25b70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.273910 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90c7cdb2-f935-4694-ad68-07ea73c25b70-secret-volume\") pod \"collect-profiles-29320665-9bzw8\" (UID: \"90c7cdb2-f935-4694-ad68-07ea73c25b70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.374345 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90c7cdb2-f935-4694-ad68-07ea73c25b70-config-volume\") pod \"collect-profiles-29320665-9bzw8\" (UID: \"90c7cdb2-f935-4694-ad68-07ea73c25b70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.374389 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rg2m\" (UniqueName: \"kubernetes.io/projected/90c7cdb2-f935-4694-ad68-07ea73c25b70-kube-api-access-5rg2m\") pod \"collect-profiles-29320665-9bzw8\" (UID: \"90c7cdb2-f935-4694-ad68-07ea73c25b70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.374418 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90c7cdb2-f935-4694-ad68-07ea73c25b70-secret-volume\") pod \"collect-profiles-29320665-9bzw8\" (UID: \"90c7cdb2-f935-4694-ad68-07ea73c25b70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.375573 4783 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90c7cdb2-f935-4694-ad68-07ea73c25b70-config-volume\") pod \"collect-profiles-29320665-9bzw8\" (UID: \"90c7cdb2-f935-4694-ad68-07ea73c25b70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.383854 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90c7cdb2-f935-4694-ad68-07ea73c25b70-secret-volume\") pod \"collect-profiles-29320665-9bzw8\" (UID: \"90c7cdb2-f935-4694-ad68-07ea73c25b70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.402042 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rg2m\" (UniqueName: \"kubernetes.io/projected/90c7cdb2-f935-4694-ad68-07ea73c25b70-kube-api-access-5rg2m\") pod \"collect-profiles-29320665-9bzw8\" (UID: \"90c7cdb2-f935-4694-ad68-07ea73c25b70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.488862 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" Sep 30 13:45:00 crc kubenswrapper[4783]: I0930 13:45:00.693353 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8"] Sep 30 13:45:01 crc kubenswrapper[4783]: I0930 13:45:01.145693 4783 generic.go:334] "Generic (PLEG): container finished" podID="90c7cdb2-f935-4694-ad68-07ea73c25b70" containerID="ce9821bdc097f0dfbbc3c212ef02aef00edf94fe6b99760a1b12dc407c0633f9" exitCode=0 Sep 30 13:45:01 crc kubenswrapper[4783]: I0930 13:45:01.145749 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" event={"ID":"90c7cdb2-f935-4694-ad68-07ea73c25b70","Type":"ContainerDied","Data":"ce9821bdc097f0dfbbc3c212ef02aef00edf94fe6b99760a1b12dc407c0633f9"} Sep 30 13:45:01 crc kubenswrapper[4783]: I0930 13:45:01.146078 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" event={"ID":"90c7cdb2-f935-4694-ad68-07ea73c25b70","Type":"ContainerStarted","Data":"0a60e5a59addfe72b74a84a8c8ef9a6901a29aa14317b7c5a7a96cbf86c14cd8"} Sep 30 13:45:02 crc kubenswrapper[4783]: I0930 13:45:02.362945 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" Sep 30 13:45:02 crc kubenswrapper[4783]: I0930 13:45:02.397278 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90c7cdb2-f935-4694-ad68-07ea73c25b70-secret-volume\") pod \"90c7cdb2-f935-4694-ad68-07ea73c25b70\" (UID: \"90c7cdb2-f935-4694-ad68-07ea73c25b70\") " Sep 30 13:45:02 crc kubenswrapper[4783]: I0930 13:45:02.397348 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90c7cdb2-f935-4694-ad68-07ea73c25b70-config-volume\") pod \"90c7cdb2-f935-4694-ad68-07ea73c25b70\" (UID: \"90c7cdb2-f935-4694-ad68-07ea73c25b70\") " Sep 30 13:45:02 crc kubenswrapper[4783]: I0930 13:45:02.397399 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rg2m\" (UniqueName: \"kubernetes.io/projected/90c7cdb2-f935-4694-ad68-07ea73c25b70-kube-api-access-5rg2m\") pod \"90c7cdb2-f935-4694-ad68-07ea73c25b70\" (UID: \"90c7cdb2-f935-4694-ad68-07ea73c25b70\") " Sep 30 13:45:02 crc kubenswrapper[4783]: I0930 13:45:02.398764 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90c7cdb2-f935-4694-ad68-07ea73c25b70-config-volume" (OuterVolumeSpecName: "config-volume") pod "90c7cdb2-f935-4694-ad68-07ea73c25b70" (UID: "90c7cdb2-f935-4694-ad68-07ea73c25b70"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:45:02 crc kubenswrapper[4783]: I0930 13:45:02.404146 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90c7cdb2-f935-4694-ad68-07ea73c25b70-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "90c7cdb2-f935-4694-ad68-07ea73c25b70" (UID: "90c7cdb2-f935-4694-ad68-07ea73c25b70"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:45:02 crc kubenswrapper[4783]: I0930 13:45:02.404382 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90c7cdb2-f935-4694-ad68-07ea73c25b70-kube-api-access-5rg2m" (OuterVolumeSpecName: "kube-api-access-5rg2m") pod "90c7cdb2-f935-4694-ad68-07ea73c25b70" (UID: "90c7cdb2-f935-4694-ad68-07ea73c25b70"). InnerVolumeSpecName "kube-api-access-5rg2m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:45:02 crc kubenswrapper[4783]: I0930 13:45:02.498361 4783 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90c7cdb2-f935-4694-ad68-07ea73c25b70-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 13:45:02 crc kubenswrapper[4783]: I0930 13:45:02.498399 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rg2m\" (UniqueName: \"kubernetes.io/projected/90c7cdb2-f935-4694-ad68-07ea73c25b70-kube-api-access-5rg2m\") on node \"crc\" DevicePath \"\"" Sep 30 13:45:02 crc kubenswrapper[4783]: I0930 13:45:02.498414 4783 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90c7cdb2-f935-4694-ad68-07ea73c25b70-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 13:45:03 crc kubenswrapper[4783]: I0930 13:45:03.160885 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" event={"ID":"90c7cdb2-f935-4694-ad68-07ea73c25b70","Type":"ContainerDied","Data":"0a60e5a59addfe72b74a84a8c8ef9a6901a29aa14317b7c5a7a96cbf86c14cd8"} Sep 30 13:45:03 crc kubenswrapper[4783]: I0930 13:45:03.161330 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a60e5a59addfe72b74a84a8c8ef9a6901a29aa14317b7c5a7a96cbf86c14cd8" Sep 30 13:45:03 crc kubenswrapper[4783]: I0930 13:45:03.160923 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8" Sep 30 13:45:07 crc kubenswrapper[4783]: I0930 13:45:07.673540 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:45:07 crc kubenswrapper[4783]: I0930 13:45:07.673948 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:45:37 crc kubenswrapper[4783]: I0930 13:45:37.673941 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:45:37 crc kubenswrapper[4783]: I0930 13:45:37.674516 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:45:37 crc kubenswrapper[4783]: I0930 13:45:37.674568 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:45:37 crc kubenswrapper[4783]: I0930 13:45:37.675119 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"124a8ad49c667c4b9b3a7d3c02a461dd34a53bfe6b1a15c17af1b4401c60ef79"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 13:45:37 crc kubenswrapper[4783]: I0930 13:45:37.675184 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://124a8ad49c667c4b9b3a7d3c02a461dd34a53bfe6b1a15c17af1b4401c60ef79" gracePeriod=600 Sep 30 13:45:38 crc kubenswrapper[4783]: I0930 13:45:38.382347 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="124a8ad49c667c4b9b3a7d3c02a461dd34a53bfe6b1a15c17af1b4401c60ef79" exitCode=0 Sep 30 13:45:38 crc kubenswrapper[4783]: I0930 13:45:38.382426 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"124a8ad49c667c4b9b3a7d3c02a461dd34a53bfe6b1a15c17af1b4401c60ef79"} Sep 30 13:45:38 crc kubenswrapper[4783]: I0930 13:45:38.383362 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"f5f4180b203e0bfb30a34850cbda7cc99e36cdce653441931fda65fe6cf8a779"} Sep 30 13:45:38 crc kubenswrapper[4783]: I0930 13:45:38.383403 4783 scope.go:117] "RemoveContainer" containerID="a2cfdce231ef87d0ab26fdeeae9d5e4950e3d20b1c7ba73fa68de0b35559b26f" Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.449891 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-22xvs"] Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.450807 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovn-controller" containerID="cri-o://0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0" gracePeriod=30 Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.450893 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="northd" containerID="cri-o://0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf" gracePeriod=30 Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.450950 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea" gracePeriod=30 Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.450917 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="kube-rbac-proxy-node" containerID="cri-o://2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76" gracePeriod=30 Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.450991 4783 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="nbdb" containerID="cri-o://9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a" gracePeriod=30 Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.450907 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="sbdb" containerID="cri-o://496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb" gracePeriod=30 Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.450955 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovn-acl-logging" containerID="cri-o://15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab" gracePeriod=30 Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.489659 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" containerID="cri-o://01301faa9f4a3968edbb1103dd149e3178bf6e13dff13cf619d8aa8ae8305e56" gracePeriod=30 Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.932103 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovnkube-controller/3.log" Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.935644 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovn-acl-logging/0.log" Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.936205 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovn-controller/0.log" Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.936831 4783 generic.go:334] "Generic (PLEG): container finished" podID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerID="01301faa9f4a3968edbb1103dd149e3178bf6e13dff13cf619d8aa8ae8305e56" exitCode=0 Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.936876 4783 generic.go:334] "Generic (PLEG): container finished" podID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerID="496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb" exitCode=0 Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.936893 4783 generic.go:334] "Generic (PLEG): container finished" podID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerID="9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a" exitCode=0 Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.936910 4783 generic.go:334] "Generic (PLEG): container finished" podID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerID="0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf" exitCode=0 Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.936925 4783 generic.go:334] "Generic (PLEG): container finished" podID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerID="86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea" exitCode=0 Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.936938 4783 generic.go:334] "Generic (PLEG): container finished" podID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerID="2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76" exitCode=0 Sep 30 
Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.936951 4783 generic.go:334] "Generic (PLEG): container finished" podID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerID="15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab" exitCode=143
Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.936966 4783 generic.go:334] "Generic (PLEG): container finished" podID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerID="0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0" exitCode=143
Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.936890 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerDied","Data":"01301faa9f4a3968edbb1103dd149e3178bf6e13dff13cf619d8aa8ae8305e56"}
Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.937105 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerDied","Data":"496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb"}
Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.937148 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerDied","Data":"9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a"}
Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.937176 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerDied","Data":"0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf"}
Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.937194 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerDied","Data":"86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea"}
Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.937203 4783 scope.go:117] "RemoveContainer" containerID="9760a4f22fd92dd635830ac3e18d203cb031b22f1a2dc3454c86c203489cd9a6"
Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.937215 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerDied","Data":"2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76"}
Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.937282 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerDied","Data":"15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab"}
Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.937309 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerDied","Data":"0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0"}
Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.940606 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pmr9_e4186982-08f1-4809-be4f-25f86353ccf1/kube-multus/2.log"
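The event={...} payloads in these "SyncLoop (PLEG)" lines are plain JSON. A self-contained sketch of decoding one; the struct is inferred from the log lines themselves (ID carries the pod UID, Data a container or sandbox ID), not lifted from kubelet source:

```go
package sketch

import "encoding/json"

// plegEvent matches payloads such as
// {"ID":"7dab54f2-...","Type":"ContainerDied","Data":"01301faa..."}.
type plegEvent struct {
	ID   string `json:"ID"`   // pod UID
	Type string `json:"Type"` // ContainerStarted, ContainerDied, ...
	Data string `json:"Data"` // container (or sandbox) ID
}

func parseEvent(raw []byte) (plegEvent, error) {
	var ev plegEvent
	err := json.Unmarshal(raw, &ev)
	return ev, err
}
```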
path="/var/log/pods/openshift-multus_multus-2pmr9_e4186982-08f1-4809-be4f-25f86353ccf1/kube-multus/1.log" Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.941448 4783 generic.go:334] "Generic (PLEG): container finished" podID="e4186982-08f1-4809-be4f-25f86353ccf1" containerID="3c492124acd867cc378468935f2a044e00d8fbfc546541b973ef868724952798" exitCode=2 Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.941498 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pmr9" event={"ID":"e4186982-08f1-4809-be4f-25f86353ccf1","Type":"ContainerDied","Data":"3c492124acd867cc378468935f2a044e00d8fbfc546541b973ef868724952798"} Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.942366 4783 scope.go:117] "RemoveContainer" containerID="3c492124acd867cc378468935f2a044e00d8fbfc546541b973ef868724952798" Sep 30 13:47:07 crc kubenswrapper[4783]: E0930 13:47:07.942995 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-2pmr9_openshift-multus(e4186982-08f1-4809-be4f-25f86353ccf1)\"" pod="openshift-multus/multus-2pmr9" podUID="e4186982-08f1-4809-be4f-25f86353ccf1" Sep 30 13:47:07 crc kubenswrapper[4783]: I0930 13:47:07.999292 4783 scope.go:117] "RemoveContainer" containerID="70cf26cf8fb9a2eb04fa746718b72199e2dfe8b11a074f145579b0bb58652ef4" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.253759 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovn-acl-logging/0.log" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.254495 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovn-controller/0.log" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.255978 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.345349 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rd852"] Sep 30 13:47:08 crc kubenswrapper[4783]: E0930 13:47:08.345814 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="kube-rbac-proxy-node" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.345835 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="kube-rbac-proxy-node" Sep 30 13:47:08 crc kubenswrapper[4783]: E0930 13:47:08.345851 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="northd" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.345863 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="northd" Sep 30 13:47:08 crc kubenswrapper[4783]: E0930 13:47:08.345883 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.345896 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: E0930 13:47:08.345938 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovn-acl-logging" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.345951 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovn-acl-logging" Sep 30 13:47:08 crc kubenswrapper[4783]: E0930 13:47:08.345968 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="sbdb" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.345980 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="sbdb" Sep 30 13:47:08 crc kubenswrapper[4783]: E0930 13:47:08.346003 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovn-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346015 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovn-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: E0930 13:47:08.346035 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346047 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: E0930 13:47:08.346062 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346074 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 13:47:08 crc kubenswrapper[4783]: E0930 13:47:08.346088 4783 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="kubecfg-setup" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346099 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="kubecfg-setup" Sep 30 13:47:08 crc kubenswrapper[4783]: E0930 13:47:08.346118 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346130 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: E0930 13:47:08.346149 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="nbdb" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346161 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="nbdb" Sep 30 13:47:08 crc kubenswrapper[4783]: E0930 13:47:08.346183 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346197 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: E0930 13:47:08.346217 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90c7cdb2-f935-4694-ad68-07ea73c25b70" containerName="collect-profiles" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346362 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="90c7cdb2-f935-4694-ad68-07ea73c25b70" containerName="collect-profiles" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346555 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="90c7cdb2-f935-4694-ad68-07ea73c25b70" containerName="collect-profiles" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346583 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="northd" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346600 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346622 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346640 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346662 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="sbdb" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346677 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovn-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346693 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346712 4783 
memory_manager.go:354] "RemoveStaleState removing state" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346729 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovn-acl-logging" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346749 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="nbdb" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346818 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="kube-rbac-proxy-node" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.346857 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: E0930 13:47:08.347110 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.347125 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" containerName="ovnkube-controller" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.349250 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.356757 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-cni-netd\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.356831 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-cni-bin\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.356889 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-log-socket\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.356931 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.356950 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-etc-openvswitch\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.356978 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-log-socket" (OuterVolumeSpecName: "log-socket") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.356980 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357009 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovnkube-config\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357015 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357142 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-env-overrides\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357191 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-kubelet\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357265 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-run-netns\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357316 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-var-lib-openvswitch\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357359 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-systemd-units\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357414 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-run-ovn-kubernetes\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357464 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-node-log\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357511 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-openvswitch\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357570 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-slash\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357625 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-systemd\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: 
\"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357474 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357501 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357524 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357679 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-298pd\" (UniqueName: \"kubernetes.io/projected/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-kube-api-access-298pd\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357727 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357812 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovnkube-script-lib\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357857 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovn-node-metrics-cert\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357903 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-ovn\") pod \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\" (UID: \"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b\") " Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.358059 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-slash\") pod \"ovnkube-node-rd852\" (UID: 
\"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.358108 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-var-lib-openvswitch\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.358530 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-run-ovn\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.358616 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-run-ovn-kubernetes\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.358671 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-log-socket\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.358746 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-run-netns\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.358798 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-node-log\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.358854 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/32d70901-48e0-4650-b099-6df3e3cfa82b-ovnkube-script-lib\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.358961 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-run-openvswitch\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359016 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/32d70901-48e0-4650-b099-6df3e3cfa82b-ovn-node-metrics-cert\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359065 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-run-systemd\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359132 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/32d70901-48e0-4650-b099-6df3e3cfa82b-env-overrides\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359180 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359273 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/32d70901-48e0-4650-b099-6df3e3cfa82b-ovnkube-config\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359335 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-kubelet\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359506 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8djh\" (UniqueName: \"kubernetes.io/projected/32d70901-48e0-4650-b099-6df3e3cfa82b-kube-api-access-d8djh\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359563 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-systemd-units\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359619 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-cni-bin\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359669 4783 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-cni-netd\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359718 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-etc-openvswitch\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359841 4783 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359872 4783 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359900 4783 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-log-socket\") on node \"crc\" DevicePath \"\"" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359923 4783 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359947 4783 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359971 4783 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359995 4783 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357524 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357550 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-node-log" (OuterVolumeSpecName: "node-log") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357568 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357594 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357620 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.357645 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-slash" (OuterVolumeSpecName: "host-slash") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.358043 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.358092 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.358686 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.359492 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.365053 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-kube-api-access-298pd" (OuterVolumeSpecName: "kube-api-access-298pd") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "kube-api-access-298pd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.365199 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.389044 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" (UID: "7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463031 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/32d70901-48e0-4650-b099-6df3e3cfa82b-env-overrides\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463092 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463124 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/32d70901-48e0-4650-b099-6df3e3cfa82b-ovnkube-config\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463156 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-kubelet\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463186 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8djh\" (UniqueName: \"kubernetes.io/projected/32d70901-48e0-4650-b099-6df3e3cfa82b-kube-api-access-d8djh\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463234 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-systemd-units\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463248 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463284 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-cni-bin\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463337 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-cni-netd\") pod \"ovnkube-node-rd852\" (UID: 
\"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463349 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-cni-bin\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463378 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-etc-openvswitch\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463418 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-slash\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463446 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-run-ovn\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463457 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-kubelet\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463496 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-var-lib-openvswitch\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463469 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-var-lib-openvswitch\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463534 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-cni-netd\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463561 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-etc-openvswitch\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 
13:47:08.463565 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-run-ovn-kubernetes\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463590 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-slash\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463606 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-log-socket\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463616 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-run-ovn\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463650 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-run-netns\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463657 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-log-socket\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463677 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-node-log\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463713 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/32d70901-48e0-4650-b099-6df3e3cfa82b-ovnkube-script-lib\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463743 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-run-openvswitch\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463743 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-systemd-units\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463770 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/32d70901-48e0-4650-b099-6df3e3cfa82b-ovn-node-metrics-cert\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463798 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-run-systemd\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463813 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-run-ovn-kubernetes\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463860 4783 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-systemd\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463876 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-298pd\" (UniqueName: \"kubernetes.io/projected/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-kube-api-access-298pd\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463889 4783 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463902 4783 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463914 4783 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463927 4783 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-ovn\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463938 4783 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-env-overrides\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463951 4783 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-kubelet\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463965 4783 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-run-netns\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.463979 4783 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.464031 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-run-systemd\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.464039 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-run-openvswitch\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.464081 4783 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-node-log\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.464079 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-host-run-netns\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.464097 4783 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-run-openvswitch\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.464065 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/32d70901-48e0-4650-b099-6df3e3cfa82b-node-log\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.464036 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/32d70901-48e0-4650-b099-6df3e3cfa82b-env-overrides\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.464123 4783 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b-host-slash\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.464548 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/32d70901-48e0-4650-b099-6df3e3cfa82b-ovnkube-config\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.464969 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/32d70901-48e0-4650-b099-6df3e3cfa82b-ovnkube-script-lib\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.470247 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/32d70901-48e0-4650-b099-6df3e3cfa82b-ovn-node-metrics-cert\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.481438 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8djh\" (UniqueName: \"kubernetes.io/projected/32d70901-48e0-4650-b099-6df3e3cfa82b-kube-api-access-d8djh\") pod \"ovnkube-node-rd852\" (UID: \"32d70901-48e0-4650-b099-6df3e3cfa82b\") " pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.679387 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.953329 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pmr9_e4186982-08f1-4809-be4f-25f86353ccf1/kube-multus/2.log"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.955009 4783 generic.go:334] "Generic (PLEG): container finished" podID="32d70901-48e0-4650-b099-6df3e3cfa82b" containerID="1efcce60585655d208457f91e920eeb3d58a098eea5fd696118903969fef8bfa" exitCode=0
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.955109 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rd852" event={"ID":"32d70901-48e0-4650-b099-6df3e3cfa82b","Type":"ContainerDied","Data":"1efcce60585655d208457f91e920eeb3d58a098eea5fd696118903969fef8bfa"}
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.955149 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rd852" event={"ID":"32d70901-48e0-4650-b099-6df3e3cfa82b","Type":"ContainerStarted","Data":"760ecacb5442798d6d00ea0b34213ea8e319cb5c8208bd970eb71cbfce168484"}
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.964542 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovn-acl-logging/0.log"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.965464 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-22xvs_7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/ovn-controller/0.log"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.966160 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs" event={"ID":"7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b","Type":"ContainerDied","Data":"20c2ff68d53fc441954e6cce30c566ee27c77827f120df1f4864a64c949abad4"}
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.966278 4783 scope.go:117] "RemoveContainer" containerID="01301faa9f4a3968edbb1103dd149e3178bf6e13dff13cf619d8aa8ae8305e56"
Sep 30 13:47:08 crc kubenswrapper[4783]: I0930 13:47:08.966356 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-22xvs"
Sep 30 13:47:09 crc kubenswrapper[4783]: I0930 13:47:09.005409 4783 scope.go:117] "RemoveContainer" containerID="496b8a24b5969fad61c89a27bab55e3e6a2bcf1fab33344985301f8fa2f58dfb"
Sep 30 13:47:09 crc kubenswrapper[4783]: I0930 13:47:09.025689 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-22xvs"]
Sep 30 13:47:09 crc kubenswrapper[4783]: I0930 13:47:09.029689 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-22xvs"]
Sep 30 13:47:09 crc kubenswrapper[4783]: I0930 13:47:09.054415 4783 scope.go:117] "RemoveContainer" containerID="9730c82ff2e41d59a88c716e34bb9f8490c612258c83d9d86b808be4e862466a"
Sep 30 13:47:09 crc kubenswrapper[4783]: I0930 13:47:09.068528 4783 scope.go:117] "RemoveContainer" containerID="0c8eccf21f233ca6744d9043aa378c37f079dac24f05b0566b0a9a7c8d9c2bdf"
Sep 30 13:47:09 crc kubenswrapper[4783]: I0930 13:47:09.096928 4783 scope.go:117] "RemoveContainer" containerID="86e1da82d756dfd6d1db5aec79cc62ce48e875efd24bea69793aa83c0b76d0ea"
Sep 30 13:47:09 crc kubenswrapper[4783]: I0930 13:47:09.117827 4783 scope.go:117] "RemoveContainer" containerID="2cc5a280cd16ef969904570c4aeaa7ded0ecfda9b96643272a66997cc0626e76"
Sep 30 13:47:09 crc kubenswrapper[4783]: I0930 13:47:09.135185 4783 scope.go:117] "RemoveContainer" containerID="15c1613e6169faa24048d21456d94be070e9863be8eddbc676be630c7d5b3fab"
Sep 30 13:47:09 crc kubenswrapper[4783]: I0930 13:47:09.150143 4783 scope.go:117] "RemoveContainer" containerID="0740537f3e4bf61ed0706411be76095716414fa3517f8c45c886db51495812b0"
Sep 30 13:47:09 crc kubenswrapper[4783]: I0930 13:47:09.163085 4783 scope.go:117] "RemoveContainer" containerID="86af61a4cbf2e066d6fb37dc983f4530dd3e8854a04eecb73f47d9971af30967"
Sep 30 13:47:09 crc kubenswrapper[4783]: I0930 13:47:09.975198 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rd852" event={"ID":"32d70901-48e0-4650-b099-6df3e3cfa82b","Type":"ContainerStarted","Data":"ac531abd4a6b34e723ae6a4e6dde2b5fb8638bee70e4f5c63f06b51d8cbe38b2"}
Sep 30 13:47:09 crc kubenswrapper[4783]: I0930 13:47:09.975537 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rd852" event={"ID":"32d70901-48e0-4650-b099-6df3e3cfa82b","Type":"ContainerStarted","Data":"32766f5c7ced41b9d94c848498464cedfdc01de72d2acf0b2d9bf8f03d1f31e3"}
Sep 30 13:47:09 crc kubenswrapper[4783]: I0930 13:47:09.975549 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rd852" event={"ID":"32d70901-48e0-4650-b099-6df3e3cfa82b","Type":"ContainerStarted","Data":"5c5e559220c82ba95db666d0060d9e7158d1403bfc0070697c14565d2d90222b"}
Sep 30 13:47:09 crc kubenswrapper[4783]: I0930 13:47:09.975558 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rd852" event={"ID":"32d70901-48e0-4650-b099-6df3e3cfa82b","Type":"ContainerStarted","Data":"7a025b934ed125cc312c5b2e1e8370df74f9e341c9e6f3cb40483f9d334481c1"}
Sep 30 13:47:10 crc kubenswrapper[4783]: I0930 13:47:10.850868 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b" path="/var/lib/kubelet/pods/7dab54f2-3ab6-480a-bfe8-8d8b17a7f81b/volumes"
Sep 30 13:47:10 crc kubenswrapper[4783]: I0930 13:47:10.987414 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rd852" event={"ID":"32d70901-48e0-4650-b099-6df3e3cfa82b","Type":"ContainerStarted","Data":"26cc86b772eb8994f16b1d1a37728b3cc99aad8017aef1506fa18063001234d2"}
Sep 30 13:47:10 crc kubenswrapper[4783]: I0930 13:47:10.987711 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rd852" event={"ID":"32d70901-48e0-4650-b099-6df3e3cfa82b","Type":"ContainerStarted","Data":"fa8a9e09f1c0b11ca8157ec3a86c00c701233d7076f3c06fb237300293f8e358"}
Sep 30 13:47:13 crc kubenswrapper[4783]: I0930 13:47:13.004436 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rd852" event={"ID":"32d70901-48e0-4650-b099-6df3e3cfa82b","Type":"ContainerStarted","Data":"1557d3f0945fe58b608f58166d7c7f84c7c2b6cc1ea45ae7a61d48a1f27efbc6"}
Sep 30 13:47:14 crc kubenswrapper[4783]: I0930 13:47:14.903175 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-tbdk6"]
Sep 30 13:47:14 crc kubenswrapper[4783]: I0930 13:47:14.905075 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:14 crc kubenswrapper[4783]: I0930 13:47:14.908471 4783 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k2c62"
Sep 30 13:47:14 crc kubenswrapper[4783]: I0930 13:47:14.908471 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage"
Sep 30 13:47:14 crc kubenswrapper[4783]: I0930 13:47:14.908474 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt"
Sep 30 13:47:14 crc kubenswrapper[4783]: I0930 13:47:14.911648 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt"
Sep 30 13:47:14 crc kubenswrapper[4783]: I0930 13:47:14.955238 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4wz4\" (UniqueName: \"kubernetes.io/projected/54c00828-b66d-446c-8c0d-c43dc287fae9-kube-api-access-c4wz4\") pod \"crc-storage-crc-tbdk6\" (UID: \"54c00828-b66d-446c-8c0d-c43dc287fae9\") " pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:14 crc kubenswrapper[4783]: I0930 13:47:14.955363 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/54c00828-b66d-446c-8c0d-c43dc287fae9-node-mnt\") pod \"crc-storage-crc-tbdk6\" (UID: \"54c00828-b66d-446c-8c0d-c43dc287fae9\") " pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:14 crc kubenswrapper[4783]: I0930 13:47:14.955402 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/54c00828-b66d-446c-8c0d-c43dc287fae9-crc-storage\") pod \"crc-storage-crc-tbdk6\" (UID: \"54c00828-b66d-446c-8c0d-c43dc287fae9\") " pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.019114 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rd852" event={"ID":"32d70901-48e0-4650-b099-6df3e3cfa82b","Type":"ContainerStarted","Data":"df99459a895cd8e6a3af0c3da158a91b41d8a9500cb6bbc6b91d1b7b5b241ae1"}
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.019342 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.019358 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.019368 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.047870 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.048750 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rd852"
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.050359 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-rd852" podStartSLOduration=7.050340989 podStartE2EDuration="7.050340989s" podCreationTimestamp="2025-09-30 13:47:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:47:15.04789065 +0000 UTC m=+734.979356957" watchObservedRunningTime="2025-09-30 13:47:15.050340989 +0000 UTC m=+734.981807296"
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.056566 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4wz4\" (UniqueName: \"kubernetes.io/projected/54c00828-b66d-446c-8c0d-c43dc287fae9-kube-api-access-c4wz4\") pod \"crc-storage-crc-tbdk6\" (UID: \"54c00828-b66d-446c-8c0d-c43dc287fae9\") " pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.056634 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/54c00828-b66d-446c-8c0d-c43dc287fae9-node-mnt\") pod \"crc-storage-crc-tbdk6\" (UID: \"54c00828-b66d-446c-8c0d-c43dc287fae9\") " pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.056676 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/54c00828-b66d-446c-8c0d-c43dc287fae9-crc-storage\") pod \"crc-storage-crc-tbdk6\" (UID: \"54c00828-b66d-446c-8c0d-c43dc287fae9\") " pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.056941 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/54c00828-b66d-446c-8c0d-c43dc287fae9-node-mnt\") pod \"crc-storage-crc-tbdk6\" (UID: \"54c00828-b66d-446c-8c0d-c43dc287fae9\") " pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.057555 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/54c00828-b66d-446c-8c0d-c43dc287fae9-crc-storage\") pod \"crc-storage-crc-tbdk6\" (UID: \"54c00828-b66d-446c-8c0d-c43dc287fae9\") " pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.077121 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4wz4\" (UniqueName: \"kubernetes.io/projected/54c00828-b66d-446c-8c0d-c43dc287fae9-kube-api-access-c4wz4\") pod \"crc-storage-crc-tbdk6\" (UID: \"54c00828-b66d-446c-8c0d-c43dc287fae9\") " pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.237294 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:15 crc kubenswrapper[4783]: E0930 13:47:15.262727 4783 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tbdk6_crc-storage_54c00828-b66d-446c-8c0d-c43dc287fae9_0(1057c7487460ac95e9758cd4201fa66058685c725c32c1036b01b3f392383cbc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Sep 30 13:47:15 crc kubenswrapper[4783]: E0930 13:47:15.262890 4783 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tbdk6_crc-storage_54c00828-b66d-446c-8c0d-c43dc287fae9_0(1057c7487460ac95e9758cd4201fa66058685c725c32c1036b01b3f392383cbc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:15 crc kubenswrapper[4783]: E0930 13:47:15.262987 4783 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tbdk6_crc-storage_54c00828-b66d-446c-8c0d-c43dc287fae9_0(1057c7487460ac95e9758cd4201fa66058685c725c32c1036b01b3f392383cbc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:15 crc kubenswrapper[4783]: E0930 13:47:15.263120 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-tbdk6_crc-storage(54c00828-b66d-446c-8c0d-c43dc287fae9)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-tbdk6_crc-storage(54c00828-b66d-446c-8c0d-c43dc287fae9)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tbdk6_crc-storage_54c00828-b66d-446c-8c0d-c43dc287fae9_0(1057c7487460ac95e9758cd4201fa66058685c725c32c1036b01b3f392383cbc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-tbdk6" podUID="54c00828-b66d-446c-8c0d-c43dc287fae9"
Sep 30 13:47:15 crc kubenswrapper[4783]: I0930 13:47:15.374863 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-tbdk6"]
Sep 30 13:47:16 crc kubenswrapper[4783]: I0930 13:47:16.025516 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:16 crc kubenswrapper[4783]: I0930 13:47:16.026175 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:16 crc kubenswrapper[4783]: E0930 13:47:16.059322 4783 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tbdk6_crc-storage_54c00828-b66d-446c-8c0d-c43dc287fae9_0(2e05be28ecac7790252bac682dd6d987aa6f7a18b8f90cd1653db0165e7485a6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Sep 30 13:47:16 crc kubenswrapper[4783]: E0930 13:47:16.059519 4783 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tbdk6_crc-storage_54c00828-b66d-446c-8c0d-c43dc287fae9_0(2e05be28ecac7790252bac682dd6d987aa6f7a18b8f90cd1653db0165e7485a6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:16 crc kubenswrapper[4783]: E0930 13:47:16.059575 4783 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tbdk6_crc-storage_54c00828-b66d-446c-8c0d-c43dc287fae9_0(2e05be28ecac7790252bac682dd6d987aa6f7a18b8f90cd1653db0165e7485a6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:16 crc kubenswrapper[4783]: E0930 13:47:16.059661 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-tbdk6_crc-storage(54c00828-b66d-446c-8c0d-c43dc287fae9)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-tbdk6_crc-storage(54c00828-b66d-446c-8c0d-c43dc287fae9)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tbdk6_crc-storage_54c00828-b66d-446c-8c0d-c43dc287fae9_0(2e05be28ecac7790252bac682dd6d987aa6f7a18b8f90cd1653db0165e7485a6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-tbdk6" podUID="54c00828-b66d-446c-8c0d-c43dc287fae9"
Sep 30 13:47:21 crc kubenswrapper[4783]: I0930 13:47:21.842768 4783 scope.go:117] "RemoveContainer" containerID="3c492124acd867cc378468935f2a044e00d8fbfc546541b973ef868724952798"
Sep 30 13:47:23 crc kubenswrapper[4783]: I0930 13:47:23.076711 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pmr9_e4186982-08f1-4809-be4f-25f86353ccf1/kube-multus/2.log"
Sep 30 13:47:23 crc kubenswrapper[4783]: I0930 13:47:23.078428 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pmr9" event={"ID":"e4186982-08f1-4809-be4f-25f86353ccf1","Type":"ContainerStarted","Data":"617d45630966d27ea7a5f2ed36f48cc39a8c1be923a61e34d9ebf48d5f830e2c"}
Sep 30 13:47:27 crc kubenswrapper[4783]: I0930 13:47:27.842963 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:27 crc kubenswrapper[4783]: I0930 13:47:27.845221 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:28 crc kubenswrapper[4783]: I0930 13:47:28.054493 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-tbdk6"]
Sep 30 13:47:28 crc kubenswrapper[4783]: I0930 13:47:28.060096 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 13:47:28 crc kubenswrapper[4783]: I0930 13:47:28.113834 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-tbdk6" event={"ID":"54c00828-b66d-446c-8c0d-c43dc287fae9","Type":"ContainerStarted","Data":"2a2a6457300c6390db130e46a10b254bc5281c54ccd10580d666bbed3966e454"}
Sep 30 13:47:31 crc kubenswrapper[4783]: I0930 13:47:31.132810 4783 generic.go:334] "Generic (PLEG): container finished" podID="54c00828-b66d-446c-8c0d-c43dc287fae9" containerID="d10c78d037207c4478abdea8a14b2538bb54b5e8cc77391b9b9b895af7100db0" exitCode=0
Sep 30 13:47:31 crc kubenswrapper[4783]: I0930 13:47:31.132954 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-tbdk6" event={"ID":"54c00828-b66d-446c-8c0d-c43dc287fae9","Type":"ContainerDied","Data":"d10c78d037207c4478abdea8a14b2538bb54b5e8cc77391b9b9b895af7100db0"}
Sep 30 13:47:32 crc kubenswrapper[4783]: I0930 13:47:32.363404 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:32 crc kubenswrapper[4783]: I0930 13:47:32.396656 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/54c00828-b66d-446c-8c0d-c43dc287fae9-node-mnt\") pod \"54c00828-b66d-446c-8c0d-c43dc287fae9\" (UID: \"54c00828-b66d-446c-8c0d-c43dc287fae9\") "
Sep 30 13:47:32 crc kubenswrapper[4783]: I0930 13:47:32.396784 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/54c00828-b66d-446c-8c0d-c43dc287fae9-crc-storage\") pod \"54c00828-b66d-446c-8c0d-c43dc287fae9\" (UID: \"54c00828-b66d-446c-8c0d-c43dc287fae9\") "
Sep 30 13:47:32 crc kubenswrapper[4783]: I0930 13:47:32.396791 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/54c00828-b66d-446c-8c0d-c43dc287fae9-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "54c00828-b66d-446c-8c0d-c43dc287fae9" (UID: "54c00828-b66d-446c-8c0d-c43dc287fae9"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 13:47:32 crc kubenswrapper[4783]: I0930 13:47:32.396840 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4wz4\" (UniqueName: \"kubernetes.io/projected/54c00828-b66d-446c-8c0d-c43dc287fae9-kube-api-access-c4wz4\") pod \"54c00828-b66d-446c-8c0d-c43dc287fae9\" (UID: \"54c00828-b66d-446c-8c0d-c43dc287fae9\") "
Sep 30 13:47:32 crc kubenswrapper[4783]: I0930 13:47:32.397030 4783 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/54c00828-b66d-446c-8c0d-c43dc287fae9-node-mnt\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:32 crc kubenswrapper[4783]: I0930 13:47:32.409523 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54c00828-b66d-446c-8c0d-c43dc287fae9-kube-api-access-c4wz4" (OuterVolumeSpecName: "kube-api-access-c4wz4") pod "54c00828-b66d-446c-8c0d-c43dc287fae9" (UID: "54c00828-b66d-446c-8c0d-c43dc287fae9"). InnerVolumeSpecName "kube-api-access-c4wz4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:47:32 crc kubenswrapper[4783]: I0930 13:47:32.414841 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54c00828-b66d-446c-8c0d-c43dc287fae9-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "54c00828-b66d-446c-8c0d-c43dc287fae9" (UID: "54c00828-b66d-446c-8c0d-c43dc287fae9"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:47:32 crc kubenswrapper[4783]: I0930 13:47:32.498279 4783 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/54c00828-b66d-446c-8c0d-c43dc287fae9-crc-storage\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:32 crc kubenswrapper[4783]: I0930 13:47:32.498502 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4wz4\" (UniqueName: \"kubernetes.io/projected/54c00828-b66d-446c-8c0d-c43dc287fae9-kube-api-access-c4wz4\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:32 crc kubenswrapper[4783]: I0930 13:47:32.910484 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vz2w9"]
Sep 30 13:47:32 crc kubenswrapper[4783]: I0930 13:47:32.910665 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" podUID="96c43e6b-e8b0-4282-8882-cafa0a59c2d1" containerName="controller-manager" containerID="cri-o://64c9114d1d91a891a4c65469fbc6f7de72984314754ff2863618cb3a4194b8f6" gracePeriod=30
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.023547 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh"]
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.023749 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" podUID="ba290d22-ab6f-413c-9dfc-3285b83488ed" containerName="route-controller-manager" containerID="cri-o://51d3e5af2a3ba403a7c12c698f823f2b14e7a99f114a873afdb43b3d4b4e5d2e" gracePeriod=30
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.122841 4783 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-fvldh container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body=
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.122890 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" podUID="ba290d22-ab6f-413c-9dfc-3285b83488ed" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused"
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.133249 4783 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-vz2w9 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body=
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.133302 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" podUID="96c43e6b-e8b0-4282-8882-cafa0a59c2d1" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused"
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.145031 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-tbdk6" event={"ID":"54c00828-b66d-446c-8c0d-c43dc287fae9","Type":"ContainerDied","Data":"2a2a6457300c6390db130e46a10b254bc5281c54ccd10580d666bbed3966e454"}
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.145063 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a2a6457300c6390db130e46a10b254bc5281c54ccd10580d666bbed3966e454"
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.145063 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tbdk6"
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.819573 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9"
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.896135 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh"
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.916642 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zclz\" (UniqueName: \"kubernetes.io/projected/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-kube-api-access-4zclz\") pod \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") "
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.916758 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-proxy-ca-bundles\") pod \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") "
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.917752 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "96c43e6b-e8b0-4282-8882-cafa0a59c2d1" (UID: "96c43e6b-e8b0-4282-8882-cafa0a59c2d1"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.917809 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-serving-cert\") pod \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") "
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.917855 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-config\") pod \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") "
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.918266 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-client-ca\") pod \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\" (UID: \"96c43e6b-e8b0-4282-8882-cafa0a59c2d1\") "
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.918829 4783 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.918907 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-config" (OuterVolumeSpecName: "config") pod "96c43e6b-e8b0-4282-8882-cafa0a59c2d1" (UID: "96c43e6b-e8b0-4282-8882-cafa0a59c2d1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.919027 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-client-ca" (OuterVolumeSpecName: "client-ca") pod "96c43e6b-e8b0-4282-8882-cafa0a59c2d1" (UID: "96c43e6b-e8b0-4282-8882-cafa0a59c2d1"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.921336 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "96c43e6b-e8b0-4282-8882-cafa0a59c2d1" (UID: "96c43e6b-e8b0-4282-8882-cafa0a59c2d1"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:47:33 crc kubenswrapper[4783]: I0930 13:47:33.923370 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-kube-api-access-4zclz" (OuterVolumeSpecName: "kube-api-access-4zclz") pod "96c43e6b-e8b0-4282-8882-cafa0a59c2d1" (UID: "96c43e6b-e8b0-4282-8882-cafa0a59c2d1"). InnerVolumeSpecName "kube-api-access-4zclz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.019790 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9jwp\" (UniqueName: \"kubernetes.io/projected/ba290d22-ab6f-413c-9dfc-3285b83488ed-kube-api-access-k9jwp\") pod \"ba290d22-ab6f-413c-9dfc-3285b83488ed\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") "
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.019922 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba290d22-ab6f-413c-9dfc-3285b83488ed-serving-cert\") pod \"ba290d22-ab6f-413c-9dfc-3285b83488ed\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") "
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.019971 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba290d22-ab6f-413c-9dfc-3285b83488ed-config\") pod \"ba290d22-ab6f-413c-9dfc-3285b83488ed\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") "
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.019994 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ba290d22-ab6f-413c-9dfc-3285b83488ed-client-ca\") pod \"ba290d22-ab6f-413c-9dfc-3285b83488ed\" (UID: \"ba290d22-ab6f-413c-9dfc-3285b83488ed\") "
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.020278 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zclz\" (UniqueName: \"kubernetes.io/projected/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-kube-api-access-4zclz\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.020298 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.020310 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-config\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.020321 4783 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96c43e6b-e8b0-4282-8882-cafa0a59c2d1-client-ca\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.020662 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba290d22-ab6f-413c-9dfc-3285b83488ed-client-ca" (OuterVolumeSpecName: "client-ca") pod "ba290d22-ab6f-413c-9dfc-3285b83488ed" (UID: "ba290d22-ab6f-413c-9dfc-3285b83488ed"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.020671 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba290d22-ab6f-413c-9dfc-3285b83488ed-config" (OuterVolumeSpecName: "config") pod "ba290d22-ab6f-413c-9dfc-3285b83488ed" (UID: "ba290d22-ab6f-413c-9dfc-3285b83488ed"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.023586 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba290d22-ab6f-413c-9dfc-3285b83488ed-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ba290d22-ab6f-413c-9dfc-3285b83488ed" (UID: "ba290d22-ab6f-413c-9dfc-3285b83488ed"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.023673 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba290d22-ab6f-413c-9dfc-3285b83488ed-kube-api-access-k9jwp" (OuterVolumeSpecName: "kube-api-access-k9jwp") pod "ba290d22-ab6f-413c-9dfc-3285b83488ed" (UID: "ba290d22-ab6f-413c-9dfc-3285b83488ed"). InnerVolumeSpecName "kube-api-access-k9jwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.121749 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9jwp\" (UniqueName: \"kubernetes.io/projected/ba290d22-ab6f-413c-9dfc-3285b83488ed-kube-api-access-k9jwp\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.121799 4783 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba290d22-ab6f-413c-9dfc-3285b83488ed-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.121818 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba290d22-ab6f-413c-9dfc-3285b83488ed-config\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.121834 4783 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ba290d22-ab6f-413c-9dfc-3285b83488ed-client-ca\") on node \"crc\" DevicePath \"\""
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.154320 4783 generic.go:334] "Generic (PLEG): container finished" podID="ba290d22-ab6f-413c-9dfc-3285b83488ed" containerID="51d3e5af2a3ba403a7c12c698f823f2b14e7a99f114a873afdb43b3d4b4e5d2e" exitCode=0
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.154382 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.154432 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" event={"ID":"ba290d22-ab6f-413c-9dfc-3285b83488ed","Type":"ContainerDied","Data":"51d3e5af2a3ba403a7c12c698f823f2b14e7a99f114a873afdb43b3d4b4e5d2e"}
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.154469 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh" event={"ID":"ba290d22-ab6f-413c-9dfc-3285b83488ed","Type":"ContainerDied","Data":"074d53e8e746f0b5f36ae2513c0f29c6b74f7d4ceaff880c8862d124411cfa4e"}
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.154498 4783 scope.go:117] "RemoveContainer" containerID="51d3e5af2a3ba403a7c12c698f823f2b14e7a99f114a873afdb43b3d4b4e5d2e"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.160204 4783 generic.go:334] "Generic (PLEG): container finished" podID="96c43e6b-e8b0-4282-8882-cafa0a59c2d1" containerID="64c9114d1d91a891a4c65469fbc6f7de72984314754ff2863618cb3a4194b8f6" exitCode=0
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.160288 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" event={"ID":"96c43e6b-e8b0-4282-8882-cafa0a59c2d1","Type":"ContainerDied","Data":"64c9114d1d91a891a4c65469fbc6f7de72984314754ff2863618cb3a4194b8f6"}
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.160325 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9" event={"ID":"96c43e6b-e8b0-4282-8882-cafa0a59c2d1","Type":"ContainerDied","Data":"c1d5e19b0552471254c4262a41bc0e8331f3bf684cf3cb0a91d87169463868c1"}
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.160502 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vz2w9"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.180135 4783 scope.go:117] "RemoveContainer" containerID="51d3e5af2a3ba403a7c12c698f823f2b14e7a99f114a873afdb43b3d4b4e5d2e"
Sep 30 13:47:34 crc kubenswrapper[4783]: E0930 13:47:34.180864 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51d3e5af2a3ba403a7c12c698f823f2b14e7a99f114a873afdb43b3d4b4e5d2e\": container with ID starting with 51d3e5af2a3ba403a7c12c698f823f2b14e7a99f114a873afdb43b3d4b4e5d2e not found: ID does not exist" containerID="51d3e5af2a3ba403a7c12c698f823f2b14e7a99f114a873afdb43b3d4b4e5d2e"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.180909 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51d3e5af2a3ba403a7c12c698f823f2b14e7a99f114a873afdb43b3d4b4e5d2e"} err="failed to get container status \"51d3e5af2a3ba403a7c12c698f823f2b14e7a99f114a873afdb43b3d4b4e5d2e\": rpc error: code = NotFound desc = could not find container \"51d3e5af2a3ba403a7c12c698f823f2b14e7a99f114a873afdb43b3d4b4e5d2e\": container with ID starting with 51d3e5af2a3ba403a7c12c698f823f2b14e7a99f114a873afdb43b3d4b4e5d2e not found: ID does not exist"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.180936 4783 scope.go:117] "RemoveContainer" containerID="64c9114d1d91a891a4c65469fbc6f7de72984314754ff2863618cb3a4194b8f6"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.199840 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vz2w9"]
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.203043 4783 scope.go:117] "RemoveContainer" containerID="64c9114d1d91a891a4c65469fbc6f7de72984314754ff2863618cb3a4194b8f6"
Sep 30 13:47:34 crc kubenswrapper[4783]: E0930 13:47:34.203556 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64c9114d1d91a891a4c65469fbc6f7de72984314754ff2863618cb3a4194b8f6\": container with ID starting with 64c9114d1d91a891a4c65469fbc6f7de72984314754ff2863618cb3a4194b8f6 not found: ID does not exist" containerID="64c9114d1d91a891a4c65469fbc6f7de72984314754ff2863618cb3a4194b8f6"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.203618 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64c9114d1d91a891a4c65469fbc6f7de72984314754ff2863618cb3a4194b8f6"} err="failed to get container status \"64c9114d1d91a891a4c65469fbc6f7de72984314754ff2863618cb3a4194b8f6\": rpc error: code = NotFound desc = could not find container \"64c9114d1d91a891a4c65469fbc6f7de72984314754ff2863618cb3a4194b8f6\": container with ID starting with 64c9114d1d91a891a4c65469fbc6f7de72984314754ff2863618cb3a4194b8f6 not found: ID does not exist"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.209025 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vz2w9"]
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.221005 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh"]
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.224917 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fvldh"]
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.851327 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96c43e6b-e8b0-4282-8882-cafa0a59c2d1" path="/var/lib/kubelet/pods/96c43e6b-e8b0-4282-8882-cafa0a59c2d1/volumes"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.852087 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba290d22-ab6f-413c-9dfc-3285b83488ed" path="/var/lib/kubelet/pods/ba290d22-ab6f-413c-9dfc-3285b83488ed/volumes"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.971940 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v"]
Sep 30 13:47:34 crc kubenswrapper[4783]: E0930 13:47:34.973039 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba290d22-ab6f-413c-9dfc-3285b83488ed" containerName="route-controller-manager"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.978613 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba290d22-ab6f-413c-9dfc-3285b83488ed" containerName="route-controller-manager"
Sep 30 13:47:34 crc kubenswrapper[4783]: E0930 13:47:34.978680 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54c00828-b66d-446c-8c0d-c43dc287fae9" containerName="storage"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.978692 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="54c00828-b66d-446c-8c0d-c43dc287fae9" containerName="storage"
Sep 30 13:47:34 crc kubenswrapper[4783]: E0930 13:47:34.978723 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96c43e6b-e8b0-4282-8882-cafa0a59c2d1" containerName="controller-manager"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.978731 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="96c43e6b-e8b0-4282-8882-cafa0a59c2d1" containerName="controller-manager"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.979066 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="96c43e6b-e8b0-4282-8882-cafa0a59c2d1" containerName="controller-manager"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.979088 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="54c00828-b66d-446c-8c0d-c43dc287fae9" containerName="storage"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.979099 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba290d22-ab6f-413c-9dfc-3285b83488ed" containerName="route-controller-manager"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.979559 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6d79556cfc-bp2zg"]
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.980730 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.981206 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.995548 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.995568 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.995841 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.996020 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.996867 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.997196 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Sep 30 13:47:34 crc kubenswrapper[4783]: I0930 13:47:34.997389 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:34.998045 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:34.998219 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:34.998398 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:34.998457 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:34.998508 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.005495 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v"]
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.010335 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.013721 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6d79556cfc-bp2zg"]
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.044697 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jh46\" (UniqueName: \"kubernetes.io/projected/5e2b57d5-afaa-46a9-8600-db743c3fa803-kube-api-access-6jh46\") pod \"route-controller-manager-55487d48ff-xtm8v\" (UID: \"5e2b57d5-afaa-46a9-8600-db743c3fa803\") " pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.044739 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e2b57d5-afaa-46a9-8600-db743c3fa803-client-ca\") pod \"route-controller-manager-55487d48ff-xtm8v\" (UID: \"5e2b57d5-afaa-46a9-8600-db743c3fa803\") " pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.044807 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8da8228f-d4ac-4fd0-bb82-6ae370685493-config\") pod \"controller-manager-6d79556cfc-bp2zg\" (UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.044831 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e2b57d5-afaa-46a9-8600-db743c3fa803-serving-cert\") pod \"route-controller-manager-55487d48ff-xtm8v\" (UID: \"5e2b57d5-afaa-46a9-8600-db743c3fa803\") " pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.044847 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8da8228f-d4ac-4fd0-bb82-6ae370685493-client-ca\") pod \"controller-manager-6d79556cfc-bp2zg\" (UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.044870 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e2b57d5-afaa-46a9-8600-db743c3fa803-config\") pod \"route-controller-manager-55487d48ff-xtm8v\" (UID: \"5e2b57d5-afaa-46a9-8600-db743c3fa803\") " pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.044895 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2t2t9\" (UniqueName: \"kubernetes.io/projected/8da8228f-d4ac-4fd0-bb82-6ae370685493-kube-api-access-2t2t9\") pod \"controller-manager-6d79556cfc-bp2zg\" (UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.044915 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8da8228f-d4ac-4fd0-bb82-6ae370685493-proxy-ca-bundles\") pod \"controller-manager-6d79556cfc-bp2zg\" (UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.044957 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8da8228f-d4ac-4fd0-bb82-6ae370685493-serving-cert\") pod \"controller-manager-6d79556cfc-bp2zg\" (UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.146109 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8da8228f-d4ac-4fd0-bb82-6ae370685493-serving-cert\") pod \"controller-manager-6d79556cfc-bp2zg\" (UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.146186 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jh46\" (UniqueName: \"kubernetes.io/projected/5e2b57d5-afaa-46a9-8600-db743c3fa803-kube-api-access-6jh46\") pod \"route-controller-manager-55487d48ff-xtm8v\" (UID: \"5e2b57d5-afaa-46a9-8600-db743c3fa803\") " pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.146246 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e2b57d5-afaa-46a9-8600-db743c3fa803-client-ca\") pod \"route-controller-manager-55487d48ff-xtm8v\" (UID: \"5e2b57d5-afaa-46a9-8600-db743c3fa803\") " pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.146277 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8da8228f-d4ac-4fd0-bb82-6ae370685493-config\") pod \"controller-manager-6d79556cfc-bp2zg\" (UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.146316 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e2b57d5-afaa-46a9-8600-db743c3fa803-serving-cert\") pod \"route-controller-manager-55487d48ff-xtm8v\" (UID: \"5e2b57d5-afaa-46a9-8600-db743c3fa803\") " pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.146350 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8da8228f-d4ac-4fd0-bb82-6ae370685493-client-ca\") pod \"controller-manager-6d79556cfc-bp2zg\" (UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.146394 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e2b57d5-afaa-46a9-8600-db743c3fa803-config\") pod \"route-controller-manager-55487d48ff-xtm8v\" (UID: \"5e2b57d5-afaa-46a9-8600-db743c3fa803\") " pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.146434 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2t2t9\" (UniqueName: \"kubernetes.io/projected/8da8228f-d4ac-4fd0-bb82-6ae370685493-kube-api-access-2t2t9\") pod \"controller-manager-6d79556cfc-bp2zg\" (UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg"
Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.146461 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8da8228f-d4ac-4fd0-bb82-6ae370685493-proxy-ca-bundles\") pod \"controller-manager-6d79556cfc-bp2zg\"
(UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg" Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.148448 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e2b57d5-afaa-46a9-8600-db743c3fa803-config\") pod \"route-controller-manager-55487d48ff-xtm8v\" (UID: \"5e2b57d5-afaa-46a9-8600-db743c3fa803\") " pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v" Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.148670 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8da8228f-d4ac-4fd0-bb82-6ae370685493-client-ca\") pod \"controller-manager-6d79556cfc-bp2zg\" (UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg" Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.148724 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e2b57d5-afaa-46a9-8600-db743c3fa803-client-ca\") pod \"route-controller-manager-55487d48ff-xtm8v\" (UID: \"5e2b57d5-afaa-46a9-8600-db743c3fa803\") " pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v" Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.148958 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8da8228f-d4ac-4fd0-bb82-6ae370685493-proxy-ca-bundles\") pod \"controller-manager-6d79556cfc-bp2zg\" (UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg" Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.150423 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8da8228f-d4ac-4fd0-bb82-6ae370685493-config\") pod \"controller-manager-6d79556cfc-bp2zg\" (UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg" Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.154267 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8da8228f-d4ac-4fd0-bb82-6ae370685493-serving-cert\") pod \"controller-manager-6d79556cfc-bp2zg\" (UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg" Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.154881 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e2b57d5-afaa-46a9-8600-db743c3fa803-serving-cert\") pod \"route-controller-manager-55487d48ff-xtm8v\" (UID: \"5e2b57d5-afaa-46a9-8600-db743c3fa803\") " pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v" Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.170769 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2t2t9\" (UniqueName: \"kubernetes.io/projected/8da8228f-d4ac-4fd0-bb82-6ae370685493-kube-api-access-2t2t9\") pod \"controller-manager-6d79556cfc-bp2zg\" (UID: \"8da8228f-d4ac-4fd0-bb82-6ae370685493\") " pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg" Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.173139 4783 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-6jh46\" (UniqueName: \"kubernetes.io/projected/5e2b57d5-afaa-46a9-8600-db743c3fa803-kube-api-access-6jh46\") pod \"route-controller-manager-55487d48ff-xtm8v\" (UID: \"5e2b57d5-afaa-46a9-8600-db743c3fa803\") " pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v" Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.317360 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg" Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.328476 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v" Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.524444 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v"] Sep 30 13:47:35 crc kubenswrapper[4783]: W0930 13:47:35.533427 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5e2b57d5_afaa_46a9_8600_db743c3fa803.slice/crio-a3f86d3252ab8287a772cb7bcc54036b2605691988bb9cc5c552a8b7f763b493 WatchSource:0}: Error finding container a3f86d3252ab8287a772cb7bcc54036b2605691988bb9cc5c552a8b7f763b493: Status 404 returned error can't find the container with id a3f86d3252ab8287a772cb7bcc54036b2605691988bb9cc5c552a8b7f763b493 Sep 30 13:47:35 crc kubenswrapper[4783]: I0930 13:47:35.551736 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6d79556cfc-bp2zg"] Sep 30 13:47:35 crc kubenswrapper[4783]: W0930 13:47:35.559858 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8da8228f_d4ac_4fd0_bb82_6ae370685493.slice/crio-3c997e96804ca24ea3f41f40ebc8017edf291731ae7f6d032fc579266720ef1c WatchSource:0}: Error finding container 3c997e96804ca24ea3f41f40ebc8017edf291731ae7f6d032fc579266720ef1c: Status 404 returned error can't find the container with id 3c997e96804ca24ea3f41f40ebc8017edf291731ae7f6d032fc579266720ef1c Sep 30 13:47:36 crc kubenswrapper[4783]: I0930 13:47:36.176666 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v" event={"ID":"5e2b57d5-afaa-46a9-8600-db743c3fa803","Type":"ContainerStarted","Data":"21d8e3895ae33b2bca16b34c7c798a75cc07f948a687cff0dfffc52226d443ac"} Sep 30 13:47:36 crc kubenswrapper[4783]: I0930 13:47:36.176713 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v" event={"ID":"5e2b57d5-afaa-46a9-8600-db743c3fa803","Type":"ContainerStarted","Data":"a3f86d3252ab8287a772cb7bcc54036b2605691988bb9cc5c552a8b7f763b493"} Sep 30 13:47:36 crc kubenswrapper[4783]: I0930 13:47:36.176878 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v" Sep 30 13:47:36 crc kubenswrapper[4783]: I0930 13:47:36.178341 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg" event={"ID":"8da8228f-d4ac-4fd0-bb82-6ae370685493","Type":"ContainerStarted","Data":"70521aa9aea5bb9ee8e75fcd4f89a897dc6604e74593157ae6025079317eb4b1"} Sep 30 13:47:36 crc kubenswrapper[4783]: I0930 
13:47:36.178381 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg" event={"ID":"8da8228f-d4ac-4fd0-bb82-6ae370685493","Type":"ContainerStarted","Data":"3c997e96804ca24ea3f41f40ebc8017edf291731ae7f6d032fc579266720ef1c"} Sep 30 13:47:36 crc kubenswrapper[4783]: I0930 13:47:36.178554 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg" Sep 30 13:47:36 crc kubenswrapper[4783]: I0930 13:47:36.184108 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg" Sep 30 13:47:36 crc kubenswrapper[4783]: I0930 13:47:36.197825 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v" podStartSLOduration=3.197803415 podStartE2EDuration="3.197803415s" podCreationTimestamp="2025-09-30 13:47:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:47:36.194785319 +0000 UTC m=+756.126251626" watchObservedRunningTime="2025-09-30 13:47:36.197803415 +0000 UTC m=+756.129269742" Sep 30 13:47:36 crc kubenswrapper[4783]: I0930 13:47:36.213865 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6d79556cfc-bp2zg" podStartSLOduration=3.213847085 podStartE2EDuration="3.213847085s" podCreationTimestamp="2025-09-30 13:47:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:47:36.212315647 +0000 UTC m=+756.143781964" watchObservedRunningTime="2025-09-30 13:47:36.213847085 +0000 UTC m=+756.145313382" Sep 30 13:47:36 crc kubenswrapper[4783]: I0930 13:47:36.397098 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-55487d48ff-xtm8v" Sep 30 13:47:37 crc kubenswrapper[4783]: I0930 13:47:37.673614 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:47:37 crc kubenswrapper[4783]: I0930 13:47:37.673680 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:47:38 crc kubenswrapper[4783]: I0930 13:47:38.703514 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rd852" Sep 30 13:47:39 crc kubenswrapper[4783]: I0930 13:47:39.642854 4783 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.194745 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq"] Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.195628 4783 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.197409 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.206364 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq"] Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.312110 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r68xz\" (UniqueName: \"kubernetes.io/projected/d73fa222-1e59-42b8-bb54-9a47ead75914-kube-api-access-r68xz\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq\" (UID: \"d73fa222-1e59-42b8-bb54-9a47ead75914\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.312180 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d73fa222-1e59-42b8-bb54-9a47ead75914-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq\" (UID: \"d73fa222-1e59-42b8-bb54-9a47ead75914\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.312419 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d73fa222-1e59-42b8-bb54-9a47ead75914-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq\" (UID: \"d73fa222-1e59-42b8-bb54-9a47ead75914\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.413603 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d73fa222-1e59-42b8-bb54-9a47ead75914-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq\" (UID: \"d73fa222-1e59-42b8-bb54-9a47ead75914\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.413719 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d73fa222-1e59-42b8-bb54-9a47ead75914-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq\" (UID: \"d73fa222-1e59-42b8-bb54-9a47ead75914\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.413800 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r68xz\" (UniqueName: \"kubernetes.io/projected/d73fa222-1e59-42b8-bb54-9a47ead75914-kube-api-access-r68xz\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq\" (UID: \"d73fa222-1e59-42b8-bb54-9a47ead75914\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.414569 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/d73fa222-1e59-42b8-bb54-9a47ead75914-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq\" (UID: \"d73fa222-1e59-42b8-bb54-9a47ead75914\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.414748 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d73fa222-1e59-42b8-bb54-9a47ead75914-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq\" (UID: \"d73fa222-1e59-42b8-bb54-9a47ead75914\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.433178 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r68xz\" (UniqueName: \"kubernetes.io/projected/d73fa222-1e59-42b8-bb54-9a47ead75914-kube-api-access-r68xz\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq\" (UID: \"d73fa222-1e59-42b8-bb54-9a47ead75914\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.512030 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" Sep 30 13:47:40 crc kubenswrapper[4783]: I0930 13:47:40.924486 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq"] Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.211752 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" event={"ID":"d73fa222-1e59-42b8-bb54-9a47ead75914","Type":"ContainerStarted","Data":"92b9b40db5478ba11871b01ea6f7951fc0ed5433fbc9e94b4655f1bcbf25a36f"} Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.212136 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" event={"ID":"d73fa222-1e59-42b8-bb54-9a47ead75914","Type":"ContainerStarted","Data":"18353431ccf38d1b356e469e7d2f324250a74040968ca8c29a1029b626e574b0"} Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.609186 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jpfnb"] Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.610437 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.624952 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jpfnb"] Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.729902 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-catalog-content\") pod \"redhat-operators-jpfnb\" (UID: \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\") " pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.729975 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-utilities\") pod \"redhat-operators-jpfnb\" (UID: \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\") " pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.730007 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pwsh\" (UniqueName: \"kubernetes.io/projected/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-kube-api-access-9pwsh\") pod \"redhat-operators-jpfnb\" (UID: \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\") " pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.831845 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-catalog-content\") pod \"redhat-operators-jpfnb\" (UID: \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\") " pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.831905 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-utilities\") pod \"redhat-operators-jpfnb\" (UID: \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\") " pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.831936 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pwsh\" (UniqueName: \"kubernetes.io/projected/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-kube-api-access-9pwsh\") pod \"redhat-operators-jpfnb\" (UID: \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\") " pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.833206 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-utilities\") pod \"redhat-operators-jpfnb\" (UID: \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\") " pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.833213 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-catalog-content\") pod \"redhat-operators-jpfnb\" (UID: \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\") " pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.854915 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-9pwsh\" (UniqueName: \"kubernetes.io/projected/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-kube-api-access-9pwsh\") pod \"redhat-operators-jpfnb\" (UID: \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\") " pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:41 crc kubenswrapper[4783]: I0930 13:47:41.931948 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:42 crc kubenswrapper[4783]: I0930 13:47:42.222889 4783 generic.go:334] "Generic (PLEG): container finished" podID="d73fa222-1e59-42b8-bb54-9a47ead75914" containerID="92b9b40db5478ba11871b01ea6f7951fc0ed5433fbc9e94b4655f1bcbf25a36f" exitCode=0 Sep 30 13:47:42 crc kubenswrapper[4783]: I0930 13:47:42.223010 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" event={"ID":"d73fa222-1e59-42b8-bb54-9a47ead75914","Type":"ContainerDied","Data":"92b9b40db5478ba11871b01ea6f7951fc0ed5433fbc9e94b4655f1bcbf25a36f"} Sep 30 13:47:42 crc kubenswrapper[4783]: I0930 13:47:42.338452 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jpfnb"] Sep 30 13:47:42 crc kubenswrapper[4783]: W0930 13:47:42.347710 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfbbc5ef0_9a28_4e11_8d4a_240ebf2ff2cd.slice/crio-32f84430f4d593fb9d7ba8a820610f752264c91ca7eb4e849342341fabd466f4 WatchSource:0}: Error finding container 32f84430f4d593fb9d7ba8a820610f752264c91ca7eb4e849342341fabd466f4: Status 404 returned error can't find the container with id 32f84430f4d593fb9d7ba8a820610f752264c91ca7eb4e849342341fabd466f4 Sep 30 13:47:43 crc kubenswrapper[4783]: I0930 13:47:43.231394 4783 generic.go:334] "Generic (PLEG): container finished" podID="fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" containerID="db6148b14732366cedf13fd6f4b6a63732205f4af6e94cf3cdc8a908b5bf8641" exitCode=0 Sep 30 13:47:43 crc kubenswrapper[4783]: I0930 13:47:43.231490 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jpfnb" event={"ID":"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd","Type":"ContainerDied","Data":"db6148b14732366cedf13fd6f4b6a63732205f4af6e94cf3cdc8a908b5bf8641"} Sep 30 13:47:43 crc kubenswrapper[4783]: I0930 13:47:43.233825 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jpfnb" event={"ID":"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd","Type":"ContainerStarted","Data":"32f84430f4d593fb9d7ba8a820610f752264c91ca7eb4e849342341fabd466f4"} Sep 30 13:47:44 crc kubenswrapper[4783]: I0930 13:47:44.244676 4783 generic.go:334] "Generic (PLEG): container finished" podID="d73fa222-1e59-42b8-bb54-9a47ead75914" containerID="821a5df18b08d6880e747cc53554ebb5af934e299d953f59c05daa4070f867bf" exitCode=0 Sep 30 13:47:44 crc kubenswrapper[4783]: I0930 13:47:44.244748 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" event={"ID":"d73fa222-1e59-42b8-bb54-9a47ead75914","Type":"ContainerDied","Data":"821a5df18b08d6880e747cc53554ebb5af934e299d953f59c05daa4070f867bf"} Sep 30 13:47:44 crc kubenswrapper[4783]: I0930 13:47:44.253101 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jpfnb" 
event={"ID":"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd","Type":"ContainerStarted","Data":"067c8fc06fa6832dbdd435429b54661ac8573bf7966524db494cc08b35c65895"} Sep 30 13:47:45 crc kubenswrapper[4783]: I0930 13:47:45.261541 4783 generic.go:334] "Generic (PLEG): container finished" podID="d73fa222-1e59-42b8-bb54-9a47ead75914" containerID="2c3943c570227820f55b790010ac0f7b86a9119360a5f557ec83bc14483e3508" exitCode=0 Sep 30 13:47:45 crc kubenswrapper[4783]: I0930 13:47:45.261632 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" event={"ID":"d73fa222-1e59-42b8-bb54-9a47ead75914","Type":"ContainerDied","Data":"2c3943c570227820f55b790010ac0f7b86a9119360a5f557ec83bc14483e3508"} Sep 30 13:47:45 crc kubenswrapper[4783]: I0930 13:47:45.263580 4783 generic.go:334] "Generic (PLEG): container finished" podID="fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" containerID="067c8fc06fa6832dbdd435429b54661ac8573bf7966524db494cc08b35c65895" exitCode=0 Sep 30 13:47:45 crc kubenswrapper[4783]: I0930 13:47:45.263632 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jpfnb" event={"ID":"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd","Type":"ContainerDied","Data":"067c8fc06fa6832dbdd435429b54661ac8573bf7966524db494cc08b35c65895"} Sep 30 13:47:46 crc kubenswrapper[4783]: I0930 13:47:46.273773 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jpfnb" event={"ID":"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd","Type":"ContainerStarted","Data":"48960f71b15040dd31ac45a96156eb65d4ce3460eaa06bd71e56bb4b90edbf6d"} Sep 30 13:47:46 crc kubenswrapper[4783]: I0930 13:47:46.305428 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jpfnb" podStartSLOduration=2.722338692 podStartE2EDuration="5.305406544s" podCreationTimestamp="2025-09-30 13:47:41 +0000 UTC" firstStartedPulling="2025-09-30 13:47:43.233077492 +0000 UTC m=+763.164543799" lastFinishedPulling="2025-09-30 13:47:45.816145334 +0000 UTC m=+765.747611651" observedRunningTime="2025-09-30 13:47:46.298330839 +0000 UTC m=+766.229797176" watchObservedRunningTime="2025-09-30 13:47:46.305406544 +0000 UTC m=+766.236872861" Sep 30 13:47:46 crc kubenswrapper[4783]: I0930 13:47:46.674660 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" Sep 30 13:47:46 crc kubenswrapper[4783]: I0930 13:47:46.795048 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r68xz\" (UniqueName: \"kubernetes.io/projected/d73fa222-1e59-42b8-bb54-9a47ead75914-kube-api-access-r68xz\") pod \"d73fa222-1e59-42b8-bb54-9a47ead75914\" (UID: \"d73fa222-1e59-42b8-bb54-9a47ead75914\") " Sep 30 13:47:46 crc kubenswrapper[4783]: I0930 13:47:46.795216 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d73fa222-1e59-42b8-bb54-9a47ead75914-util\") pod \"d73fa222-1e59-42b8-bb54-9a47ead75914\" (UID: \"d73fa222-1e59-42b8-bb54-9a47ead75914\") " Sep 30 13:47:46 crc kubenswrapper[4783]: I0930 13:47:46.795330 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d73fa222-1e59-42b8-bb54-9a47ead75914-bundle\") pod \"d73fa222-1e59-42b8-bb54-9a47ead75914\" (UID: \"d73fa222-1e59-42b8-bb54-9a47ead75914\") " Sep 30 13:47:46 crc kubenswrapper[4783]: I0930 13:47:46.796453 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d73fa222-1e59-42b8-bb54-9a47ead75914-bundle" (OuterVolumeSpecName: "bundle") pod "d73fa222-1e59-42b8-bb54-9a47ead75914" (UID: "d73fa222-1e59-42b8-bb54-9a47ead75914"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:47:46 crc kubenswrapper[4783]: I0930 13:47:46.807430 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d73fa222-1e59-42b8-bb54-9a47ead75914-kube-api-access-r68xz" (OuterVolumeSpecName: "kube-api-access-r68xz") pod "d73fa222-1e59-42b8-bb54-9a47ead75914" (UID: "d73fa222-1e59-42b8-bb54-9a47ead75914"). InnerVolumeSpecName "kube-api-access-r68xz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:47:46 crc kubenswrapper[4783]: I0930 13:47:46.809553 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d73fa222-1e59-42b8-bb54-9a47ead75914-util" (OuterVolumeSpecName: "util") pod "d73fa222-1e59-42b8-bb54-9a47ead75914" (UID: "d73fa222-1e59-42b8-bb54-9a47ead75914"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:47:46 crc kubenswrapper[4783]: I0930 13:47:46.896437 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r68xz\" (UniqueName: \"kubernetes.io/projected/d73fa222-1e59-42b8-bb54-9a47ead75914-kube-api-access-r68xz\") on node \"crc\" DevicePath \"\"" Sep 30 13:47:46 crc kubenswrapper[4783]: I0930 13:47:46.896608 4783 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d73fa222-1e59-42b8-bb54-9a47ead75914-util\") on node \"crc\" DevicePath \"\"" Sep 30 13:47:46 crc kubenswrapper[4783]: I0930 13:47:46.896623 4783 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d73fa222-1e59-42b8-bb54-9a47ead75914-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:47:47 crc kubenswrapper[4783]: I0930 13:47:47.283496 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" event={"ID":"d73fa222-1e59-42b8-bb54-9a47ead75914","Type":"ContainerDied","Data":"18353431ccf38d1b356e469e7d2f324250a74040968ca8c29a1029b626e574b0"} Sep 30 13:47:47 crc kubenswrapper[4783]: I0930 13:47:47.283534 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq" Sep 30 13:47:47 crc kubenswrapper[4783]: I0930 13:47:47.283552 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18353431ccf38d1b356e469e7d2f324250a74040968ca8c29a1029b626e574b0" Sep 30 13:47:49 crc kubenswrapper[4783]: I0930 13:47:49.312861 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-2j82g"] Sep 30 13:47:49 crc kubenswrapper[4783]: E0930 13:47:49.313051 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d73fa222-1e59-42b8-bb54-9a47ead75914" containerName="pull" Sep 30 13:47:49 crc kubenswrapper[4783]: I0930 13:47:49.313062 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d73fa222-1e59-42b8-bb54-9a47ead75914" containerName="pull" Sep 30 13:47:49 crc kubenswrapper[4783]: E0930 13:47:49.313075 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d73fa222-1e59-42b8-bb54-9a47ead75914" containerName="util" Sep 30 13:47:49 crc kubenswrapper[4783]: I0930 13:47:49.313081 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d73fa222-1e59-42b8-bb54-9a47ead75914" containerName="util" Sep 30 13:47:49 crc kubenswrapper[4783]: E0930 13:47:49.313097 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d73fa222-1e59-42b8-bb54-9a47ead75914" containerName="extract" Sep 30 13:47:49 crc kubenswrapper[4783]: I0930 13:47:49.313103 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d73fa222-1e59-42b8-bb54-9a47ead75914" containerName="extract" Sep 30 13:47:49 crc kubenswrapper[4783]: I0930 13:47:49.313188 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d73fa222-1e59-42b8-bb54-9a47ead75914" containerName="extract" Sep 30 13:47:49 crc kubenswrapper[4783]: I0930 13:47:49.313546 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-2j82g" Sep 30 13:47:49 crc kubenswrapper[4783]: I0930 13:47:49.318009 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-6zbf8" Sep 30 13:47:49 crc kubenswrapper[4783]: I0930 13:47:49.318143 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Sep 30 13:47:49 crc kubenswrapper[4783]: I0930 13:47:49.318621 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Sep 30 13:47:49 crc kubenswrapper[4783]: I0930 13:47:49.330544 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-2j82g"] Sep 30 13:47:49 crc kubenswrapper[4783]: I0930 13:47:49.429809 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgcdj\" (UniqueName: \"kubernetes.io/projected/b1643e3b-aa5b-4ab1-8d01-0d16bb277b1c-kube-api-access-dgcdj\") pod \"nmstate-operator-5d6f6cfd66-2j82g\" (UID: \"b1643e3b-aa5b-4ab1-8d01-0d16bb277b1c\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-2j82g" Sep 30 13:47:49 crc kubenswrapper[4783]: I0930 13:47:49.530903 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgcdj\" (UniqueName: \"kubernetes.io/projected/b1643e3b-aa5b-4ab1-8d01-0d16bb277b1c-kube-api-access-dgcdj\") pod \"nmstate-operator-5d6f6cfd66-2j82g\" (UID: \"b1643e3b-aa5b-4ab1-8d01-0d16bb277b1c\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-2j82g" Sep 30 13:47:49 crc kubenswrapper[4783]: I0930 13:47:49.559672 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgcdj\" (UniqueName: \"kubernetes.io/projected/b1643e3b-aa5b-4ab1-8d01-0d16bb277b1c-kube-api-access-dgcdj\") pod \"nmstate-operator-5d6f6cfd66-2j82g\" (UID: \"b1643e3b-aa5b-4ab1-8d01-0d16bb277b1c\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-2j82g" Sep 30 13:47:49 crc kubenswrapper[4783]: I0930 13:47:49.633989 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-2j82g" Sep 30 13:47:50 crc kubenswrapper[4783]: I0930 13:47:50.079481 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-2j82g"] Sep 30 13:47:50 crc kubenswrapper[4783]: W0930 13:47:50.087545 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1643e3b_aa5b_4ab1_8d01_0d16bb277b1c.slice/crio-674ff64fc7987fc71827a27452aaad52fe6661f068062875941b9b5b9f965e64 WatchSource:0}: Error finding container 674ff64fc7987fc71827a27452aaad52fe6661f068062875941b9b5b9f965e64: Status 404 returned error can't find the container with id 674ff64fc7987fc71827a27452aaad52fe6661f068062875941b9b5b9f965e64 Sep 30 13:47:50 crc kubenswrapper[4783]: I0930 13:47:50.306601 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-2j82g" event={"ID":"b1643e3b-aa5b-4ab1-8d01-0d16bb277b1c","Type":"ContainerStarted","Data":"674ff64fc7987fc71827a27452aaad52fe6661f068062875941b9b5b9f965e64"} Sep 30 13:47:51 crc kubenswrapper[4783]: I0930 13:47:51.932316 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:51 crc kubenswrapper[4783]: I0930 13:47:51.933510 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:51 crc kubenswrapper[4783]: I0930 13:47:51.974353 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:52 crc kubenswrapper[4783]: I0930 13:47:52.385352 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:54 crc kubenswrapper[4783]: I0930 13:47:54.329820 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-2j82g" event={"ID":"b1643e3b-aa5b-4ab1-8d01-0d16bb277b1c","Type":"ContainerStarted","Data":"175ed7d95c8db52f38df6982c77c2d9f93bd3cd4419e5318499016dd21e36f70"} Sep 30 13:47:54 crc kubenswrapper[4783]: I0930 13:47:54.397420 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-2j82g" podStartSLOduration=1.956253851 podStartE2EDuration="5.39738887s" podCreationTimestamp="2025-09-30 13:47:49 +0000 UTC" firstStartedPulling="2025-09-30 13:47:50.09021735 +0000 UTC m=+770.021683667" lastFinishedPulling="2025-09-30 13:47:53.531352369 +0000 UTC m=+773.462818686" observedRunningTime="2025-09-30 13:47:54.349270358 +0000 UTC m=+774.280736675" watchObservedRunningTime="2025-09-30 13:47:54.39738887 +0000 UTC m=+774.328855177" Sep 30 13:47:54 crc kubenswrapper[4783]: I0930 13:47:54.400347 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jpfnb"] Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.334488 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jpfnb" podUID="fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" containerName="registry-server" containerID="cri-o://48960f71b15040dd31ac45a96156eb65d4ce3460eaa06bd71e56bb4b90edbf6d" gracePeriod=2 Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.359934 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-k96qg"] Sep 30 
13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.360921 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-k96qg" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.363240 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-c9wqk" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.378972 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-k96qg"] Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.390613 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm"] Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.391319 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.393605 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.397108 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-rtqrb"] Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.397689 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.409781 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdnwp\" (UniqueName: \"kubernetes.io/projected/bca29d47-0e6d-4623-896c-e4ea4ddf1c14-kube-api-access-bdnwp\") pod \"nmstate-metrics-58fcddf996-k96qg\" (UID: \"bca29d47-0e6d-4623-896c-e4ea4ddf1c14\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-k96qg" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.430004 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm"] Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.511432 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdnwp\" (UniqueName: \"kubernetes.io/projected/bca29d47-0e6d-4623-896c-e4ea4ddf1c14-kube-api-access-bdnwp\") pod \"nmstate-metrics-58fcddf996-k96qg\" (UID: \"bca29d47-0e6d-4623-896c-e4ea4ddf1c14\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-k96qg" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.511498 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmx4z\" (UniqueName: \"kubernetes.io/projected/70a55481-e973-4943-9e2b-48b96623f8a8-kube-api-access-vmx4z\") pod \"nmstate-handler-rtqrb\" (UID: \"70a55481-e973-4943-9e2b-48b96623f8a8\") " pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.511525 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/70a55481-e973-4943-9e2b-48b96623f8a8-nmstate-lock\") pod \"nmstate-handler-rtqrb\" (UID: \"70a55481-e973-4943-9e2b-48b96623f8a8\") " pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.511548 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: 
\"kubernetes.io/host-path/70a55481-e973-4943-9e2b-48b96623f8a8-dbus-socket\") pod \"nmstate-handler-rtqrb\" (UID: \"70a55481-e973-4943-9e2b-48b96623f8a8\") " pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.511599 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/12776533-ac8c-4d72-b6d1-0a9e46184617-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-rvbkm\" (UID: \"12776533-ac8c-4d72-b6d1-0a9e46184617\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.511647 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfjfn\" (UniqueName: \"kubernetes.io/projected/12776533-ac8c-4d72-b6d1-0a9e46184617-kube-api-access-hfjfn\") pod \"nmstate-webhook-6d689559c5-rvbkm\" (UID: \"12776533-ac8c-4d72-b6d1-0a9e46184617\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.511671 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/70a55481-e973-4943-9e2b-48b96623f8a8-ovs-socket\") pod \"nmstate-handler-rtqrb\" (UID: \"70a55481-e973-4943-9e2b-48b96623f8a8\") " pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.512497 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd"] Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.513286 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.515307 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.515368 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.519892 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-hvk2c" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.539671 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdnwp\" (UniqueName: \"kubernetes.io/projected/bca29d47-0e6d-4623-896c-e4ea4ddf1c14-kube-api-access-bdnwp\") pod \"nmstate-metrics-58fcddf996-k96qg\" (UID: \"bca29d47-0e6d-4623-896c-e4ea4ddf1c14\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-k96qg" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.574141 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd"] Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.616191 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/12776533-ac8c-4d72-b6d1-0a9e46184617-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-rvbkm\" (UID: \"12776533-ac8c-4d72-b6d1-0a9e46184617\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.616258 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: 
\"kubernetes.io/configmap/87a6adf2-811c-4d06-9d91-9ec8608884ac-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-m9kbd\" (UID: \"87a6adf2-811c-4d06-9d91-9ec8608884ac\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.616282 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/87a6adf2-811c-4d06-9d91-9ec8608884ac-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-m9kbd\" (UID: \"87a6adf2-811c-4d06-9d91-9ec8608884ac\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.616309 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfjfn\" (UniqueName: \"kubernetes.io/projected/12776533-ac8c-4d72-b6d1-0a9e46184617-kube-api-access-hfjfn\") pod \"nmstate-webhook-6d689559c5-rvbkm\" (UID: \"12776533-ac8c-4d72-b6d1-0a9e46184617\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.616329 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/70a55481-e973-4943-9e2b-48b96623f8a8-ovs-socket\") pod \"nmstate-handler-rtqrb\" (UID: \"70a55481-e973-4943-9e2b-48b96623f8a8\") " pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.616356 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5zkt\" (UniqueName: \"kubernetes.io/projected/87a6adf2-811c-4d06-9d91-9ec8608884ac-kube-api-access-q5zkt\") pod \"nmstate-console-plugin-864bb6dfb5-m9kbd\" (UID: \"87a6adf2-811c-4d06-9d91-9ec8608884ac\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.616393 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/70a55481-e973-4943-9e2b-48b96623f8a8-nmstate-lock\") pod \"nmstate-handler-rtqrb\" (UID: \"70a55481-e973-4943-9e2b-48b96623f8a8\") " pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.616408 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmx4z\" (UniqueName: \"kubernetes.io/projected/70a55481-e973-4943-9e2b-48b96623f8a8-kube-api-access-vmx4z\") pod \"nmstate-handler-rtqrb\" (UID: \"70a55481-e973-4943-9e2b-48b96623f8a8\") " pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.616425 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/70a55481-e973-4943-9e2b-48b96623f8a8-dbus-socket\") pod \"nmstate-handler-rtqrb\" (UID: \"70a55481-e973-4943-9e2b-48b96623f8a8\") " pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.616744 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/70a55481-e973-4943-9e2b-48b96623f8a8-dbus-socket\") pod \"nmstate-handler-rtqrb\" (UID: \"70a55481-e973-4943-9e2b-48b96623f8a8\") " pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:47:55 crc kubenswrapper[4783]: E0930 13:47:55.616833 4783 secret.go:188] Couldn't get secret 
openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Sep 30 13:47:55 crc kubenswrapper[4783]: E0930 13:47:55.616878 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/12776533-ac8c-4d72-b6d1-0a9e46184617-tls-key-pair podName:12776533-ac8c-4d72-b6d1-0a9e46184617 nodeName:}" failed. No retries permitted until 2025-09-30 13:47:56.116861027 +0000 UTC m=+776.048327334 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/12776533-ac8c-4d72-b6d1-0a9e46184617-tls-key-pair") pod "nmstate-webhook-6d689559c5-rvbkm" (UID: "12776533-ac8c-4d72-b6d1-0a9e46184617") : secret "openshift-nmstate-webhook" not found Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.618179 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/70a55481-e973-4943-9e2b-48b96623f8a8-ovs-socket\") pod \"nmstate-handler-rtqrb\" (UID: \"70a55481-e973-4943-9e2b-48b96623f8a8\") " pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.618248 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/70a55481-e973-4943-9e2b-48b96623f8a8-nmstate-lock\") pod \"nmstate-handler-rtqrb\" (UID: \"70a55481-e973-4943-9e2b-48b96623f8a8\") " pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.648535 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfjfn\" (UniqueName: \"kubernetes.io/projected/12776533-ac8c-4d72-b6d1-0a9e46184617-kube-api-access-hfjfn\") pod \"nmstate-webhook-6d689559c5-rvbkm\" (UID: \"12776533-ac8c-4d72-b6d1-0a9e46184617\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.655489 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmx4z\" (UniqueName: \"kubernetes.io/projected/70a55481-e973-4943-9e2b-48b96623f8a8-kube-api-access-vmx4z\") pod \"nmstate-handler-rtqrb\" (UID: \"70a55481-e973-4943-9e2b-48b96623f8a8\") " pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.687071 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-k96qg" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.709981 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-56d75bb464-fm9bz"] Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.710638 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.718192 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5zkt\" (UniqueName: \"kubernetes.io/projected/87a6adf2-811c-4d06-9d91-9ec8608884ac-kube-api-access-q5zkt\") pod \"nmstate-console-plugin-864bb6dfb5-m9kbd\" (UID: \"87a6adf2-811c-4d06-9d91-9ec8608884ac\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.718283 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/87a6adf2-811c-4d06-9d91-9ec8608884ac-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-m9kbd\" (UID: \"87a6adf2-811c-4d06-9d91-9ec8608884ac\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.718302 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/87a6adf2-811c-4d06-9d91-9ec8608884ac-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-m9kbd\" (UID: \"87a6adf2-811c-4d06-9d91-9ec8608884ac\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.719193 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/87a6adf2-811c-4d06-9d91-9ec8608884ac-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-m9kbd\" (UID: \"87a6adf2-811c-4d06-9d91-9ec8608884ac\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.722102 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/87a6adf2-811c-4d06-9d91-9ec8608884ac-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-m9kbd\" (UID: \"87a6adf2-811c-4d06-9d91-9ec8608884ac\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.739756 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.743088 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5zkt\" (UniqueName: \"kubernetes.io/projected/87a6adf2-811c-4d06-9d91-9ec8608884ac-kube-api-access-q5zkt\") pod \"nmstate-console-plugin-864bb6dfb5-m9kbd\" (UID: \"87a6adf2-811c-4d06-9d91-9ec8608884ac\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.745309 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-56d75bb464-fm9bz"] Sep 30 13:47:55 crc kubenswrapper[4783]: W0930 13:47:55.780073 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70a55481_e973_4943_9e2b_48b96623f8a8.slice/crio-e146f7ab9790283e5acae2b9461f8f7cb78f57d8a2bfce4f8cfa60097c67f210 WatchSource:0}: Error finding container e146f7ab9790283e5acae2b9461f8f7cb78f57d8a2bfce4f8cfa60097c67f210: Status 404 returned error can't find the container with id e146f7ab9790283e5acae2b9461f8f7cb78f57d8a2bfce4f8cfa60097c67f210 Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.813989 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.819022 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/af92b42f-848a-4c83-a8f2-962f1c8bb77d-console-oauth-config\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.819065 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/af92b42f-848a-4c83-a8f2-962f1c8bb77d-service-ca\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.819094 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mslkq\" (UniqueName: \"kubernetes.io/projected/af92b42f-848a-4c83-a8f2-962f1c8bb77d-kube-api-access-mslkq\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.819126 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/af92b42f-848a-4c83-a8f2-962f1c8bb77d-oauth-serving-cert\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.819311 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/af92b42f-848a-4c83-a8f2-962f1c8bb77d-trusted-ca-bundle\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.819361 4783 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/af92b42f-848a-4c83-a8f2-962f1c8bb77d-console-config\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.819444 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/af92b42f-848a-4c83-a8f2-962f1c8bb77d-console-serving-cert\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.826892 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.920817 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pwsh\" (UniqueName: \"kubernetes.io/projected/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-kube-api-access-9pwsh\") pod \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\" (UID: \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\") " Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.921234 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-catalog-content\") pod \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\" (UID: \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\") " Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.921261 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-utilities\") pod \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\" (UID: \"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd\") " Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.922106 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-utilities" (OuterVolumeSpecName: "utilities") pod "fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" (UID: "fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.922697 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/af92b42f-848a-4c83-a8f2-962f1c8bb77d-trusted-ca-bundle\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.922734 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/af92b42f-848a-4c83-a8f2-962f1c8bb77d-console-config\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.922773 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/af92b42f-848a-4c83-a8f2-962f1c8bb77d-console-serving-cert\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.922804 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/af92b42f-848a-4c83-a8f2-962f1c8bb77d-console-oauth-config\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.922827 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/af92b42f-848a-4c83-a8f2-962f1c8bb77d-service-ca\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.923688 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/af92b42f-848a-4c83-a8f2-962f1c8bb77d-console-config\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.923729 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mslkq\" (UniqueName: \"kubernetes.io/projected/af92b42f-848a-4c83-a8f2-962f1c8bb77d-kube-api-access-mslkq\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.923748 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/af92b42f-848a-4c83-a8f2-962f1c8bb77d-service-ca\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.923774 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/af92b42f-848a-4c83-a8f2-962f1c8bb77d-oauth-serving-cert\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " 
pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.923847 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.923935 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/af92b42f-848a-4c83-a8f2-962f1c8bb77d-trusted-ca-bundle\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.924601 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/af92b42f-848a-4c83-a8f2-962f1c8bb77d-oauth-serving-cert\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.925839 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-kube-api-access-9pwsh" (OuterVolumeSpecName: "kube-api-access-9pwsh") pod "fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" (UID: "fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd"). InnerVolumeSpecName "kube-api-access-9pwsh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.926575 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/af92b42f-848a-4c83-a8f2-962f1c8bb77d-console-oauth-config\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.927035 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/af92b42f-848a-4c83-a8f2-962f1c8bb77d-console-serving-cert\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.940329 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mslkq\" (UniqueName: \"kubernetes.io/projected/af92b42f-848a-4c83-a8f2-962f1c8bb77d-kube-api-access-mslkq\") pod \"console-56d75bb464-fm9bz\" (UID: \"af92b42f-848a-4c83-a8f2-962f1c8bb77d\") " pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:55 crc kubenswrapper[4783]: I0930 13:47:55.993132 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" (UID: "fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.025137 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pwsh\" (UniqueName: \"kubernetes.io/projected/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-kube-api-access-9pwsh\") on node \"crc\" DevicePath \"\"" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.025189 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.056509 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.126895 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/12776533-ac8c-4d72-b6d1-0a9e46184617-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-rvbkm\" (UID: \"12776533-ac8c-4d72-b6d1-0a9e46184617\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.131476 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/12776533-ac8c-4d72-b6d1-0a9e46184617-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-rvbkm\" (UID: \"12776533-ac8c-4d72-b6d1-0a9e46184617\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.165013 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-k96qg"] Sep 30 13:47:56 crc kubenswrapper[4783]: W0930 13:47:56.172471 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbca29d47_0e6d_4623_896c_e4ea4ddf1c14.slice/crio-1c524f6f67911bef214268bbe9e8093250d2bb57c5aa7096319f89c4cef63612 WatchSource:0}: Error finding container 1c524f6f67911bef214268bbe9e8093250d2bb57c5aa7096319f89c4cef63612: Status 404 returned error can't find the container with id 1c524f6f67911bef214268bbe9e8093250d2bb57c5aa7096319f89c4cef63612 Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.236813 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd"] Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.316030 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.341285 4783 generic.go:334] "Generic (PLEG): container finished" podID="fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" containerID="48960f71b15040dd31ac45a96156eb65d4ce3460eaa06bd71e56bb4b90edbf6d" exitCode=0 Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.341384 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jpfnb" event={"ID":"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd","Type":"ContainerDied","Data":"48960f71b15040dd31ac45a96156eb65d4ce3460eaa06bd71e56bb4b90edbf6d"} Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.341433 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jpfnb" event={"ID":"fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd","Type":"ContainerDied","Data":"32f84430f4d593fb9d7ba8a820610f752264c91ca7eb4e849342341fabd466f4"} Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.341446 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jpfnb" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.341453 4783 scope.go:117] "RemoveContainer" containerID="48960f71b15040dd31ac45a96156eb65d4ce3460eaa06bd71e56bb4b90edbf6d" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.343242 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd" event={"ID":"87a6adf2-811c-4d06-9d91-9ec8608884ac","Type":"ContainerStarted","Data":"b000de77877cf71a01cbe5124bb908c83e8ccfdbdb9943066c62df5d87de9717"} Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.344947 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-rtqrb" event={"ID":"70a55481-e973-4943-9e2b-48b96623f8a8","Type":"ContainerStarted","Data":"e146f7ab9790283e5acae2b9461f8f7cb78f57d8a2bfce4f8cfa60097c67f210"} Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.346135 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-k96qg" event={"ID":"bca29d47-0e6d-4623-896c-e4ea4ddf1c14","Type":"ContainerStarted","Data":"1c524f6f67911bef214268bbe9e8093250d2bb57c5aa7096319f89c4cef63612"} Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.361449 4783 scope.go:117] "RemoveContainer" containerID="067c8fc06fa6832dbdd435429b54661ac8573bf7966524db494cc08b35c65895" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.377376 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jpfnb"] Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.380254 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jpfnb"] Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.401971 4783 scope.go:117] "RemoveContainer" containerID="db6148b14732366cedf13fd6f4b6a63732205f4af6e94cf3cdc8a908b5bf8641" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.423388 4783 scope.go:117] "RemoveContainer" containerID="48960f71b15040dd31ac45a96156eb65d4ce3460eaa06bd71e56bb4b90edbf6d" Sep 30 13:47:56 crc kubenswrapper[4783]: E0930 13:47:56.424311 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48960f71b15040dd31ac45a96156eb65d4ce3460eaa06bd71e56bb4b90edbf6d\": container with ID starting with 
48960f71b15040dd31ac45a96156eb65d4ce3460eaa06bd71e56bb4b90edbf6d not found: ID does not exist" containerID="48960f71b15040dd31ac45a96156eb65d4ce3460eaa06bd71e56bb4b90edbf6d" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.424347 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48960f71b15040dd31ac45a96156eb65d4ce3460eaa06bd71e56bb4b90edbf6d"} err="failed to get container status \"48960f71b15040dd31ac45a96156eb65d4ce3460eaa06bd71e56bb4b90edbf6d\": rpc error: code = NotFound desc = could not find container \"48960f71b15040dd31ac45a96156eb65d4ce3460eaa06bd71e56bb4b90edbf6d\": container with ID starting with 48960f71b15040dd31ac45a96156eb65d4ce3460eaa06bd71e56bb4b90edbf6d not found: ID does not exist" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.424378 4783 scope.go:117] "RemoveContainer" containerID="067c8fc06fa6832dbdd435429b54661ac8573bf7966524db494cc08b35c65895" Sep 30 13:47:56 crc kubenswrapper[4783]: E0930 13:47:56.424625 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"067c8fc06fa6832dbdd435429b54661ac8573bf7966524db494cc08b35c65895\": container with ID starting with 067c8fc06fa6832dbdd435429b54661ac8573bf7966524db494cc08b35c65895 not found: ID does not exist" containerID="067c8fc06fa6832dbdd435429b54661ac8573bf7966524db494cc08b35c65895" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.424642 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"067c8fc06fa6832dbdd435429b54661ac8573bf7966524db494cc08b35c65895"} err="failed to get container status \"067c8fc06fa6832dbdd435429b54661ac8573bf7966524db494cc08b35c65895\": rpc error: code = NotFound desc = could not find container \"067c8fc06fa6832dbdd435429b54661ac8573bf7966524db494cc08b35c65895\": container with ID starting with 067c8fc06fa6832dbdd435429b54661ac8573bf7966524db494cc08b35c65895 not found: ID does not exist" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.424653 4783 scope.go:117] "RemoveContainer" containerID="db6148b14732366cedf13fd6f4b6a63732205f4af6e94cf3cdc8a908b5bf8641" Sep 30 13:47:56 crc kubenswrapper[4783]: E0930 13:47:56.424822 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db6148b14732366cedf13fd6f4b6a63732205f4af6e94cf3cdc8a908b5bf8641\": container with ID starting with db6148b14732366cedf13fd6f4b6a63732205f4af6e94cf3cdc8a908b5bf8641 not found: ID does not exist" containerID="db6148b14732366cedf13fd6f4b6a63732205f4af6e94cf3cdc8a908b5bf8641" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.424843 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db6148b14732366cedf13fd6f4b6a63732205f4af6e94cf3cdc8a908b5bf8641"} err="failed to get container status \"db6148b14732366cedf13fd6f4b6a63732205f4af6e94cf3cdc8a908b5bf8641\": rpc error: code = NotFound desc = could not find container \"db6148b14732366cedf13fd6f4b6a63732205f4af6e94cf3cdc8a908b5bf8641\": container with ID starting with db6148b14732366cedf13fd6f4b6a63732205f4af6e94cf3cdc8a908b5bf8641 not found: ID does not exist" Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.490558 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-56d75bb464-fm9bz"] Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.760761 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm"] Sep 30 13:47:56 crc kubenswrapper[4783]: W0930 13:47:56.764934 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12776533_ac8c_4d72_b6d1_0a9e46184617.slice/crio-201543bdaefbcc138cd08d304336bbc65eaf58cb20e25ff100ab7a2ac11ced41 WatchSource:0}: Error finding container 201543bdaefbcc138cd08d304336bbc65eaf58cb20e25ff100ab7a2ac11ced41: Status 404 returned error can't find the container with id 201543bdaefbcc138cd08d304336bbc65eaf58cb20e25ff100ab7a2ac11ced41 Sep 30 13:47:56 crc kubenswrapper[4783]: I0930 13:47:56.853585 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" path="/var/lib/kubelet/pods/fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd/volumes" Sep 30 13:47:57 crc kubenswrapper[4783]: I0930 13:47:57.357328 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm" event={"ID":"12776533-ac8c-4d72-b6d1-0a9e46184617","Type":"ContainerStarted","Data":"201543bdaefbcc138cd08d304336bbc65eaf58cb20e25ff100ab7a2ac11ced41"} Sep 30 13:47:57 crc kubenswrapper[4783]: I0930 13:47:57.359440 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-56d75bb464-fm9bz" event={"ID":"af92b42f-848a-4c83-a8f2-962f1c8bb77d","Type":"ContainerStarted","Data":"253b6ffb956d6570af6cb342a9dc1484483a25b0cd13fb8ea73af393fd9f941a"} Sep 30 13:47:57 crc kubenswrapper[4783]: I0930 13:47:57.359503 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-56d75bb464-fm9bz" event={"ID":"af92b42f-848a-4c83-a8f2-962f1c8bb77d","Type":"ContainerStarted","Data":"0f423241ecbf65be95698c708b26fda539a3d284fc61d9ca99faa999469c25b9"} Sep 30 13:47:57 crc kubenswrapper[4783]: I0930 13:47:57.396458 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-56d75bb464-fm9bz" podStartSLOduration=2.396423939 podStartE2EDuration="2.396423939s" podCreationTimestamp="2025-09-30 13:47:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:47:57.389994905 +0000 UTC m=+777.321461232" watchObservedRunningTime="2025-09-30 13:47:57.396423939 +0000 UTC m=+777.327890286" Sep 30 13:48:00 crc kubenswrapper[4783]: I0930 13:48:00.409912 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd" event={"ID":"87a6adf2-811c-4d06-9d91-9ec8608884ac","Type":"ContainerStarted","Data":"be47860d00c00bdd06826395fd135b7e4affdb3ede1c302785462bc7fbb735f1"} Sep 30 13:48:00 crc kubenswrapper[4783]: I0930 13:48:00.413968 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-rtqrb" event={"ID":"70a55481-e973-4943-9e2b-48b96623f8a8","Type":"ContainerStarted","Data":"2252b569b2a9e20efc6481c2b54898db07502e49e1e1401e5ba98df66e8dde66"} Sep 30 13:48:00 crc kubenswrapper[4783]: I0930 13:48:00.414108 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 30 13:48:00 crc kubenswrapper[4783]: I0930 13:48:00.416703 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-k96qg" event={"ID":"bca29d47-0e6d-4623-896c-e4ea4ddf1c14","Type":"ContainerStarted","Data":"cf2bbd9fc3fc00ccb0e3751f4fe6202508dc34155eaeaec0e8810501a1d28a60"} Sep 30 
13:48:00 crc kubenswrapper[4783]: I0930 13:48:00.419063 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm" event={"ID":"12776533-ac8c-4d72-b6d1-0a9e46184617","Type":"ContainerStarted","Data":"2e8d699a0245216534f2d0c208a2af08ae1db24791dc50aa37a4b1338cc7ad0b"} Sep 30 13:48:00 crc kubenswrapper[4783]: I0930 13:48:00.419308 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm" Sep 30 13:48:00 crc kubenswrapper[4783]: I0930 13:48:00.433210 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-m9kbd" podStartSLOduration=1.893474575 podStartE2EDuration="5.43319379s" podCreationTimestamp="2025-09-30 13:47:55 +0000 UTC" firstStartedPulling="2025-09-30 13:47:56.246081152 +0000 UTC m=+776.177547499" lastFinishedPulling="2025-09-30 13:47:59.785800397 +0000 UTC m=+779.717266714" observedRunningTime="2025-09-30 13:48:00.431277789 +0000 UTC m=+780.362744096" watchObservedRunningTime="2025-09-30 13:48:00.43319379 +0000 UTC m=+780.364660097" Sep 30 13:48:00 crc kubenswrapper[4783]: I0930 13:48:00.479333 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-rtqrb" podStartSLOduration=1.477601569 podStartE2EDuration="5.479312947s" podCreationTimestamp="2025-09-30 13:47:55 +0000 UTC" firstStartedPulling="2025-09-30 13:47:55.781826907 +0000 UTC m=+775.713293214" lastFinishedPulling="2025-09-30 13:47:59.783538245 +0000 UTC m=+779.715004592" observedRunningTime="2025-09-30 13:48:00.460143337 +0000 UTC m=+780.391609644" watchObservedRunningTime="2025-09-30 13:48:00.479312947 +0000 UTC m=+780.410779274" Sep 30 13:48:00 crc kubenswrapper[4783]: I0930 13:48:00.481701 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm" podStartSLOduration=2.465075304 podStartE2EDuration="5.481689363s" podCreationTimestamp="2025-09-30 13:47:55 +0000 UTC" firstStartedPulling="2025-09-30 13:47:56.768390663 +0000 UTC m=+776.699857010" lastFinishedPulling="2025-09-30 13:47:59.785004722 +0000 UTC m=+779.716471069" observedRunningTime="2025-09-30 13:48:00.480168004 +0000 UTC m=+780.411634311" watchObservedRunningTime="2025-09-30 13:48:00.481689363 +0000 UTC m=+780.413155670" Sep 30 13:48:05 crc kubenswrapper[4783]: I0930 13:48:05.458682 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-k96qg" event={"ID":"bca29d47-0e6d-4623-896c-e4ea4ddf1c14","Type":"ContainerStarted","Data":"2623534d7635e532af66f4b4c1d8707e5911da626d5f72819525778ce84aa819"} Sep 30 13:48:05 crc kubenswrapper[4783]: I0930 13:48:05.482145 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-k96qg" podStartSLOduration=1.904192965 podStartE2EDuration="10.482122064s" podCreationTimestamp="2025-09-30 13:47:55 +0000 UTC" firstStartedPulling="2025-09-30 13:47:56.174053569 +0000 UTC m=+776.105519876" lastFinishedPulling="2025-09-30 13:48:04.751982628 +0000 UTC m=+784.683448975" observedRunningTime="2025-09-30 13:48:05.477427595 +0000 UTC m=+785.408893912" watchObservedRunningTime="2025-09-30 13:48:05.482122064 +0000 UTC m=+785.413588381" Sep 30 13:48:05 crc kubenswrapper[4783]: I0930 13:48:05.782198 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-rtqrb" Sep 
30 13:48:06 crc kubenswrapper[4783]: I0930 13:48:06.056884 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:48:06 crc kubenswrapper[4783]: I0930 13:48:06.057265 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:48:06 crc kubenswrapper[4783]: I0930 13:48:06.064737 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:48:06 crc kubenswrapper[4783]: I0930 13:48:06.470866 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-56d75bb464-fm9bz" Sep 30 13:48:06 crc kubenswrapper[4783]: I0930 13:48:06.527281 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-x9v6j"] Sep 30 13:48:07 crc kubenswrapper[4783]: I0930 13:48:07.673806 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:48:07 crc kubenswrapper[4783]: I0930 13:48:07.673863 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:48:16 crc kubenswrapper[4783]: I0930 13:48:16.321479 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-rvbkm" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.132565 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6hm2b"] Sep 30 13:48:21 crc kubenswrapper[4783]: E0930 13:48:21.135781 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" containerName="extract-content" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.135830 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" containerName="extract-content" Sep 30 13:48:21 crc kubenswrapper[4783]: E0930 13:48:21.135915 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" containerName="registry-server" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.135943 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" containerName="registry-server" Sep 30 13:48:21 crc kubenswrapper[4783]: E0930 13:48:21.135965 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" containerName="extract-utilities" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.135971 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" containerName="extract-utilities" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.136146 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbbc5ef0-9a28-4e11-8d4a-240ebf2ff2cd" containerName="registry-server" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.136865 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.138807 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6hm2b"] Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.305419 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2fpn\" (UniqueName: \"kubernetes.io/projected/5086fa0f-c984-468d-ac1e-2fff31540c31-kube-api-access-b2fpn\") pod \"certified-operators-6hm2b\" (UID: \"5086fa0f-c984-468d-ac1e-2fff31540c31\") " pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.305876 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5086fa0f-c984-468d-ac1e-2fff31540c31-utilities\") pod \"certified-operators-6hm2b\" (UID: \"5086fa0f-c984-468d-ac1e-2fff31540c31\") " pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.305950 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5086fa0f-c984-468d-ac1e-2fff31540c31-catalog-content\") pod \"certified-operators-6hm2b\" (UID: \"5086fa0f-c984-468d-ac1e-2fff31540c31\") " pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.407599 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5086fa0f-c984-468d-ac1e-2fff31540c31-utilities\") pod \"certified-operators-6hm2b\" (UID: \"5086fa0f-c984-468d-ac1e-2fff31540c31\") " pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.407657 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5086fa0f-c984-468d-ac1e-2fff31540c31-catalog-content\") pod \"certified-operators-6hm2b\" (UID: \"5086fa0f-c984-468d-ac1e-2fff31540c31\") " pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.407689 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2fpn\" (UniqueName: \"kubernetes.io/projected/5086fa0f-c984-468d-ac1e-2fff31540c31-kube-api-access-b2fpn\") pod \"certified-operators-6hm2b\" (UID: \"5086fa0f-c984-468d-ac1e-2fff31540c31\") " pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.408394 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5086fa0f-c984-468d-ac1e-2fff31540c31-utilities\") pod \"certified-operators-6hm2b\" (UID: \"5086fa0f-c984-468d-ac1e-2fff31540c31\") " pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.408612 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5086fa0f-c984-468d-ac1e-2fff31540c31-catalog-content\") pod \"certified-operators-6hm2b\" (UID: \"5086fa0f-c984-468d-ac1e-2fff31540c31\") " pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.424937 4783 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-b2fpn\" (UniqueName: \"kubernetes.io/projected/5086fa0f-c984-468d-ac1e-2fff31540c31-kube-api-access-b2fpn\") pod \"certified-operators-6hm2b\" (UID: \"5086fa0f-c984-468d-ac1e-2fff31540c31\") " pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.457375 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:21 crc kubenswrapper[4783]: I0930 13:48:21.909538 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6hm2b"] Sep 30 13:48:21 crc kubenswrapper[4783]: W0930 13:48:21.914025 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5086fa0f_c984_468d_ac1e_2fff31540c31.slice/crio-bfaa7c02915eb729d15cd64cfbb4ada8075cbcd4d6a4121fc0985845945ee4f3 WatchSource:0}: Error finding container bfaa7c02915eb729d15cd64cfbb4ada8075cbcd4d6a4121fc0985845945ee4f3: Status 404 returned error can't find the container with id bfaa7c02915eb729d15cd64cfbb4ada8075cbcd4d6a4121fc0985845945ee4f3 Sep 30 13:48:22 crc kubenswrapper[4783]: I0930 13:48:22.577275 4783 generic.go:334] "Generic (PLEG): container finished" podID="5086fa0f-c984-468d-ac1e-2fff31540c31" containerID="d245bcc48a69a50c482b247a64a695031f96b6dcaf6468475098d29599fa864e" exitCode=0 Sep 30 13:48:22 crc kubenswrapper[4783]: I0930 13:48:22.577440 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6hm2b" event={"ID":"5086fa0f-c984-468d-ac1e-2fff31540c31","Type":"ContainerDied","Data":"d245bcc48a69a50c482b247a64a695031f96b6dcaf6468475098d29599fa864e"} Sep 30 13:48:22 crc kubenswrapper[4783]: I0930 13:48:22.577608 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6hm2b" event={"ID":"5086fa0f-c984-468d-ac1e-2fff31540c31","Type":"ContainerStarted","Data":"bfaa7c02915eb729d15cd64cfbb4ada8075cbcd4d6a4121fc0985845945ee4f3"} Sep 30 13:48:23 crc kubenswrapper[4783]: I0930 13:48:23.588442 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6hm2b" event={"ID":"5086fa0f-c984-468d-ac1e-2fff31540c31","Type":"ContainerStarted","Data":"01be4c7d4fbf36866a0019ac0f845fd81e34b52f8985034946555e94aae5b906"} Sep 30 13:48:24 crc kubenswrapper[4783]: I0930 13:48:24.595114 4783 generic.go:334] "Generic (PLEG): container finished" podID="5086fa0f-c984-468d-ac1e-2fff31540c31" containerID="01be4c7d4fbf36866a0019ac0f845fd81e34b52f8985034946555e94aae5b906" exitCode=0 Sep 30 13:48:24 crc kubenswrapper[4783]: I0930 13:48:24.596069 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6hm2b" event={"ID":"5086fa0f-c984-468d-ac1e-2fff31540c31","Type":"ContainerDied","Data":"01be4c7d4fbf36866a0019ac0f845fd81e34b52f8985034946555e94aae5b906"} Sep 30 13:48:25 crc kubenswrapper[4783]: I0930 13:48:25.604520 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6hm2b" event={"ID":"5086fa0f-c984-468d-ac1e-2fff31540c31","Type":"ContainerStarted","Data":"26ecaa9494dac6a2f61a98407f72a921163ceaf34cee2d4832c1a8a904354314"} Sep 30 13:48:25 crc kubenswrapper[4783]: I0930 13:48:25.622794 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6hm2b" 
podStartSLOduration=1.975668441 podStartE2EDuration="4.622772921s" podCreationTimestamp="2025-09-30 13:48:21 +0000 UTC" firstStartedPulling="2025-09-30 13:48:22.578601745 +0000 UTC m=+802.510068052" lastFinishedPulling="2025-09-30 13:48:25.225706225 +0000 UTC m=+805.157172532" observedRunningTime="2025-09-30 13:48:25.620966014 +0000 UTC m=+805.552432341" watchObservedRunningTime="2025-09-30 13:48:25.622772921 +0000 UTC m=+805.554239238" Sep 30 13:48:27 crc kubenswrapper[4783]: I0930 13:48:27.108049 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-w7g7m"] Sep 30 13:48:27 crc kubenswrapper[4783]: I0930 13:48:27.110245 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:27 crc kubenswrapper[4783]: I0930 13:48:27.120450 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w7g7m"] Sep 30 13:48:27 crc kubenswrapper[4783]: I0930 13:48:27.202190 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-utilities\") pod \"community-operators-w7g7m\" (UID: \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\") " pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:27 crc kubenswrapper[4783]: I0930 13:48:27.202406 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-catalog-content\") pod \"community-operators-w7g7m\" (UID: \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\") " pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:27 crc kubenswrapper[4783]: I0930 13:48:27.202541 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsxkd\" (UniqueName: \"kubernetes.io/projected/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-kube-api-access-nsxkd\") pod \"community-operators-w7g7m\" (UID: \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\") " pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:27 crc kubenswrapper[4783]: I0930 13:48:27.304292 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsxkd\" (UniqueName: \"kubernetes.io/projected/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-kube-api-access-nsxkd\") pod \"community-operators-w7g7m\" (UID: \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\") " pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:27 crc kubenswrapper[4783]: I0930 13:48:27.304376 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-utilities\") pod \"community-operators-w7g7m\" (UID: \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\") " pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:27 crc kubenswrapper[4783]: I0930 13:48:27.304440 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-catalog-content\") pod \"community-operators-w7g7m\" (UID: \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\") " pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:27 crc kubenswrapper[4783]: I0930 13:48:27.307261 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-utilities\") pod \"community-operators-w7g7m\" (UID: \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\") " pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:27 crc kubenswrapper[4783]: I0930 13:48:27.307348 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-catalog-content\") pod \"community-operators-w7g7m\" (UID: \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\") " pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:27 crc kubenswrapper[4783]: I0930 13:48:27.336214 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsxkd\" (UniqueName: \"kubernetes.io/projected/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-kube-api-access-nsxkd\") pod \"community-operators-w7g7m\" (UID: \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\") " pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:27 crc kubenswrapper[4783]: I0930 13:48:27.440481 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:27 crc kubenswrapper[4783]: I0930 13:48:27.964299 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w7g7m"] Sep 30 13:48:28 crc kubenswrapper[4783]: I0930 13:48:28.626833 4783 generic.go:334] "Generic (PLEG): container finished" podID="ffa55fbb-dd3f-4519-9d2e-88aab68af48e" containerID="0459ef7e87b07aca9b3f516d8ab4b41d9da347db4319fc277860760b22b2d6c8" exitCode=0 Sep 30 13:48:28 crc kubenswrapper[4783]: I0930 13:48:28.626929 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w7g7m" event={"ID":"ffa55fbb-dd3f-4519-9d2e-88aab68af48e","Type":"ContainerDied","Data":"0459ef7e87b07aca9b3f516d8ab4b41d9da347db4319fc277860760b22b2d6c8"} Sep 30 13:48:28 crc kubenswrapper[4783]: I0930 13:48:28.627364 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w7g7m" event={"ID":"ffa55fbb-dd3f-4519-9d2e-88aab68af48e","Type":"ContainerStarted","Data":"8523614bc3a9e89281682becb81068c266f183148dc378e994f38b71c444902d"} Sep 30 13:48:30 crc kubenswrapper[4783]: I0930 13:48:30.641884 4783 generic.go:334] "Generic (PLEG): container finished" podID="ffa55fbb-dd3f-4519-9d2e-88aab68af48e" containerID="c90bf8d31ad0b36787f0329f3a852152d4077cc7bb69d585fb62e99be1cfad16" exitCode=0 Sep 30 13:48:30 crc kubenswrapper[4783]: I0930 13:48:30.642117 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w7g7m" event={"ID":"ffa55fbb-dd3f-4519-9d2e-88aab68af48e","Type":"ContainerDied","Data":"c90bf8d31ad0b36787f0329f3a852152d4077cc7bb69d585fb62e99be1cfad16"} Sep 30 13:48:31 crc kubenswrapper[4783]: I0930 13:48:31.457974 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:31 crc kubenswrapper[4783]: I0930 13:48:31.458308 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:31 crc kubenswrapper[4783]: I0930 13:48:31.495458 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:31 crc kubenswrapper[4783]: I0930 13:48:31.572636 4783 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-console/console-f9d7485db-x9v6j" podUID="ca271c35-f2e4-4c56-a82b-4f47591904f1" containerName="console" containerID="cri-o://3fb4128783c5358dba22678809f71a06b5ef48cb4306202e25f71305229b53d1" gracePeriod=15 Sep 30 13:48:31 crc kubenswrapper[4783]: I0930 13:48:31.655120 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w7g7m" event={"ID":"ffa55fbb-dd3f-4519-9d2e-88aab68af48e","Type":"ContainerStarted","Data":"bfed4a52aad166556116408e3951bf979ae58d1f3ad36fc9b74439300f93b9ed"} Sep 30 13:48:31 crc kubenswrapper[4783]: I0930 13:48:31.673586 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-w7g7m" podStartSLOduration=2.218903282 podStartE2EDuration="4.673569879s" podCreationTimestamp="2025-09-30 13:48:27 +0000 UTC" firstStartedPulling="2025-09-30 13:48:28.629140694 +0000 UTC m=+808.560607001" lastFinishedPulling="2025-09-30 13:48:31.083807251 +0000 UTC m=+811.015273598" observedRunningTime="2025-09-30 13:48:31.670616275 +0000 UTC m=+811.602082622" watchObservedRunningTime="2025-09-30 13:48:31.673569879 +0000 UTC m=+811.605036196" Sep 30 13:48:31 crc kubenswrapper[4783]: I0930 13:48:31.702732 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:31 crc kubenswrapper[4783]: I0930 13:48:31.961433 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-x9v6j_ca271c35-f2e4-4c56-a82b-4f47591904f1/console/0.log" Sep 30 13:48:31 crc kubenswrapper[4783]: I0930 13:48:31.961788 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.071653 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-serving-cert\") pod \"ca271c35-f2e4-4c56-a82b-4f47591904f1\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.071710 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-oauth-serving-cert\") pod \"ca271c35-f2e4-4c56-a82b-4f47591904f1\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.071759 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-trusted-ca-bundle\") pod \"ca271c35-f2e4-4c56-a82b-4f47591904f1\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.071776 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctgrj\" (UniqueName: \"kubernetes.io/projected/ca271c35-f2e4-4c56-a82b-4f47591904f1-kube-api-access-ctgrj\") pod \"ca271c35-f2e4-4c56-a82b-4f47591904f1\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.071807 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-config\") pod 
\"ca271c35-f2e4-4c56-a82b-4f47591904f1\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.071828 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-oauth-config\") pod \"ca271c35-f2e4-4c56-a82b-4f47591904f1\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.071876 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-service-ca\") pod \"ca271c35-f2e4-4c56-a82b-4f47591904f1\" (UID: \"ca271c35-f2e4-4c56-a82b-4f47591904f1\") " Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.072522 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-service-ca" (OuterVolumeSpecName: "service-ca") pod "ca271c35-f2e4-4c56-a82b-4f47591904f1" (UID: "ca271c35-f2e4-4c56-a82b-4f47591904f1"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.072558 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "ca271c35-f2e4-4c56-a82b-4f47591904f1" (UID: "ca271c35-f2e4-4c56-a82b-4f47591904f1"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.072576 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "ca271c35-f2e4-4c56-a82b-4f47591904f1" (UID: "ca271c35-f2e4-4c56-a82b-4f47591904f1"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.073004 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-config" (OuterVolumeSpecName: "console-config") pod "ca271c35-f2e4-4c56-a82b-4f47591904f1" (UID: "ca271c35-f2e4-4c56-a82b-4f47591904f1"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.077032 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "ca271c35-f2e4-4c56-a82b-4f47591904f1" (UID: "ca271c35-f2e4-4c56-a82b-4f47591904f1"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.077317 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "ca271c35-f2e4-4c56-a82b-4f47591904f1" (UID: "ca271c35-f2e4-4c56-a82b-4f47591904f1"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.078952 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca271c35-f2e4-4c56-a82b-4f47591904f1-kube-api-access-ctgrj" (OuterVolumeSpecName: "kube-api-access-ctgrj") pod "ca271c35-f2e4-4c56-a82b-4f47591904f1" (UID: "ca271c35-f2e4-4c56-a82b-4f47591904f1"). InnerVolumeSpecName "kube-api-access-ctgrj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.173089 4783 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.173126 4783 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.173139 4783 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.173149 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctgrj\" (UniqueName: \"kubernetes.io/projected/ca271c35-f2e4-4c56-a82b-4f47591904f1-kube-api-access-ctgrj\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.173158 4783 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.173166 4783 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.173189 4783 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ca271c35-f2e4-4c56-a82b-4f47591904f1-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.661883 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-x9v6j_ca271c35-f2e4-4c56-a82b-4f47591904f1/console/0.log" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.661953 4783 generic.go:334] "Generic (PLEG): container finished" podID="ca271c35-f2e4-4c56-a82b-4f47591904f1" containerID="3fb4128783c5358dba22678809f71a06b5ef48cb4306202e25f71305229b53d1" exitCode=2 Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.662324 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x9v6j" event={"ID":"ca271c35-f2e4-4c56-a82b-4f47591904f1","Type":"ContainerDied","Data":"3fb4128783c5358dba22678809f71a06b5ef48cb4306202e25f71305229b53d1"} Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.662386 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x9v6j" event={"ID":"ca271c35-f2e4-4c56-a82b-4f47591904f1","Type":"ContainerDied","Data":"a0a9b7681f0ef4f26c409acee357008b2f3ec5fd99a7bed7ad97536b1318350d"} Sep 30 13:48:32 
crc kubenswrapper[4783]: I0930 13:48:32.662407 4783 scope.go:117] "RemoveContainer" containerID="3fb4128783c5358dba22678809f71a06b5ef48cb4306202e25f71305229b53d1" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.662550 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-x9v6j" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.688786 4783 scope.go:117] "RemoveContainer" containerID="3fb4128783c5358dba22678809f71a06b5ef48cb4306202e25f71305229b53d1" Sep 30 13:48:32 crc kubenswrapper[4783]: E0930 13:48:32.690235 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fb4128783c5358dba22678809f71a06b5ef48cb4306202e25f71305229b53d1\": container with ID starting with 3fb4128783c5358dba22678809f71a06b5ef48cb4306202e25f71305229b53d1 not found: ID does not exist" containerID="3fb4128783c5358dba22678809f71a06b5ef48cb4306202e25f71305229b53d1" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.690273 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fb4128783c5358dba22678809f71a06b5ef48cb4306202e25f71305229b53d1"} err="failed to get container status \"3fb4128783c5358dba22678809f71a06b5ef48cb4306202e25f71305229b53d1\": rpc error: code = NotFound desc = could not find container \"3fb4128783c5358dba22678809f71a06b5ef48cb4306202e25f71305229b53d1\": container with ID starting with 3fb4128783c5358dba22678809f71a06b5ef48cb4306202e25f71305229b53d1 not found: ID does not exist" Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.693783 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-x9v6j"] Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.696034 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-x9v6j"] Sep 30 13:48:32 crc kubenswrapper[4783]: I0930 13:48:32.851630 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca271c35-f2e4-4c56-a82b-4f47591904f1" path="/var/lib/kubelet/pods/ca271c35-f2e4-4c56-a82b-4f47591904f1/volumes" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.165039 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp"] Sep 30 13:48:33 crc kubenswrapper[4783]: E0930 13:48:33.165511 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca271c35-f2e4-4c56-a82b-4f47591904f1" containerName="console" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.165551 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca271c35-f2e4-4c56-a82b-4f47591904f1" containerName="console" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.165830 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca271c35-f2e4-4c56-a82b-4f47591904f1" containerName="console" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.167597 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.170741 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.180155 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp"] Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.286114 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzgjz\" (UniqueName: \"kubernetes.io/projected/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-kube-api-access-nzgjz\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp\" (UID: \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.286307 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp\" (UID: \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.286345 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp\" (UID: \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.387202 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp\" (UID: \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.387314 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp\" (UID: \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.387394 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzgjz\" (UniqueName: \"kubernetes.io/projected/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-kube-api-access-nzgjz\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp\" (UID: \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.387849 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp\" (UID: \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.388334 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp\" (UID: \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.419127 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzgjz\" (UniqueName: \"kubernetes.io/projected/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-kube-api-access-nzgjz\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp\" (UID: \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.488309 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" Sep 30 13:48:33 crc kubenswrapper[4783]: I0930 13:48:33.906821 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp"] Sep 30 13:48:33 crc kubenswrapper[4783]: W0930 13:48:33.915118 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6619aa41_665c_4e5e_bbbd_be79c4c2db9f.slice/crio-666db0dad1608f0f0021fa738248e563f02b992050a7f0a076ae44de585f68a7 WatchSource:0}: Error finding container 666db0dad1608f0f0021fa738248e563f02b992050a7f0a076ae44de585f68a7: Status 404 returned error can't find the container with id 666db0dad1608f0f0021fa738248e563f02b992050a7f0a076ae44de585f68a7 Sep 30 13:48:34 crc kubenswrapper[4783]: I0930 13:48:34.503348 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6hm2b"] Sep 30 13:48:34 crc kubenswrapper[4783]: I0930 13:48:34.503709 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6hm2b" podUID="5086fa0f-c984-468d-ac1e-2fff31540c31" containerName="registry-server" containerID="cri-o://26ecaa9494dac6a2f61a98407f72a921163ceaf34cee2d4832c1a8a904354314" gracePeriod=2 Sep 30 13:48:34 crc kubenswrapper[4783]: I0930 13:48:34.682471 4783 generic.go:334] "Generic (PLEG): container finished" podID="5086fa0f-c984-468d-ac1e-2fff31540c31" containerID="26ecaa9494dac6a2f61a98407f72a921163ceaf34cee2d4832c1a8a904354314" exitCode=0 Sep 30 13:48:34 crc kubenswrapper[4783]: I0930 13:48:34.682599 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6hm2b" event={"ID":"5086fa0f-c984-468d-ac1e-2fff31540c31","Type":"ContainerDied","Data":"26ecaa9494dac6a2f61a98407f72a921163ceaf34cee2d4832c1a8a904354314"} Sep 30 13:48:34 crc kubenswrapper[4783]: I0930 13:48:34.684809 4783 generic.go:334] "Generic (PLEG): container finished" podID="6619aa41-665c-4e5e-bbbd-be79c4c2db9f" 
containerID="3e25c5dc19d41200728029287f34bccb3b74054acd0d901b416adf8fd6c944df" exitCode=0 Sep 30 13:48:34 crc kubenswrapper[4783]: I0930 13:48:34.684849 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" event={"ID":"6619aa41-665c-4e5e-bbbd-be79c4c2db9f","Type":"ContainerDied","Data":"3e25c5dc19d41200728029287f34bccb3b74054acd0d901b416adf8fd6c944df"} Sep 30 13:48:34 crc kubenswrapper[4783]: I0930 13:48:34.684899 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" event={"ID":"6619aa41-665c-4e5e-bbbd-be79c4c2db9f","Type":"ContainerStarted","Data":"666db0dad1608f0f0021fa738248e563f02b992050a7f0a076ae44de585f68a7"} Sep 30 13:48:34 crc kubenswrapper[4783]: I0930 13:48:34.944996 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.111426 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2fpn\" (UniqueName: \"kubernetes.io/projected/5086fa0f-c984-468d-ac1e-2fff31540c31-kube-api-access-b2fpn\") pod \"5086fa0f-c984-468d-ac1e-2fff31540c31\" (UID: \"5086fa0f-c984-468d-ac1e-2fff31540c31\") " Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.111777 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5086fa0f-c984-468d-ac1e-2fff31540c31-catalog-content\") pod \"5086fa0f-c984-468d-ac1e-2fff31540c31\" (UID: \"5086fa0f-c984-468d-ac1e-2fff31540c31\") " Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.111818 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5086fa0f-c984-468d-ac1e-2fff31540c31-utilities\") pod \"5086fa0f-c984-468d-ac1e-2fff31540c31\" (UID: \"5086fa0f-c984-468d-ac1e-2fff31540c31\") " Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.113131 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5086fa0f-c984-468d-ac1e-2fff31540c31-utilities" (OuterVolumeSpecName: "utilities") pod "5086fa0f-c984-468d-ac1e-2fff31540c31" (UID: "5086fa0f-c984-468d-ac1e-2fff31540c31"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.121514 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5086fa0f-c984-468d-ac1e-2fff31540c31-kube-api-access-b2fpn" (OuterVolumeSpecName: "kube-api-access-b2fpn") pod "5086fa0f-c984-468d-ac1e-2fff31540c31" (UID: "5086fa0f-c984-468d-ac1e-2fff31540c31"). InnerVolumeSpecName "kube-api-access-b2fpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.156151 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5086fa0f-c984-468d-ac1e-2fff31540c31-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5086fa0f-c984-468d-ac1e-2fff31540c31" (UID: "5086fa0f-c984-468d-ac1e-2fff31540c31"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.214019 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2fpn\" (UniqueName: \"kubernetes.io/projected/5086fa0f-c984-468d-ac1e-2fff31540c31-kube-api-access-b2fpn\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.214076 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5086fa0f-c984-468d-ac1e-2fff31540c31-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.214098 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5086fa0f-c984-468d-ac1e-2fff31540c31-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.694703 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6hm2b" event={"ID":"5086fa0f-c984-468d-ac1e-2fff31540c31","Type":"ContainerDied","Data":"bfaa7c02915eb729d15cd64cfbb4ada8075cbcd4d6a4121fc0985845945ee4f3"} Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.694752 4783 scope.go:117] "RemoveContainer" containerID="26ecaa9494dac6a2f61a98407f72a921163ceaf34cee2d4832c1a8a904354314" Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.694805 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6hm2b" Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.720759 4783 scope.go:117] "RemoveContainer" containerID="01be4c7d4fbf36866a0019ac0f845fd81e34b52f8985034946555e94aae5b906" Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.747861 4783 scope.go:117] "RemoveContainer" containerID="d245bcc48a69a50c482b247a64a695031f96b6dcaf6468475098d29599fa864e" Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.751258 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6hm2b"] Sep 30 13:48:35 crc kubenswrapper[4783]: I0930 13:48:35.759034 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6hm2b"] Sep 30 13:48:36 crc kubenswrapper[4783]: I0930 13:48:36.707708 4783 generic.go:334] "Generic (PLEG): container finished" podID="6619aa41-665c-4e5e-bbbd-be79c4c2db9f" containerID="0039bddb3308e5fe191d3f5f912ba2a4963e83d290647bdf86f85c485da490d6" exitCode=0 Sep 30 13:48:36 crc kubenswrapper[4783]: I0930 13:48:36.707980 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" event={"ID":"6619aa41-665c-4e5e-bbbd-be79c4c2db9f","Type":"ContainerDied","Data":"0039bddb3308e5fe191d3f5f912ba2a4963e83d290647bdf86f85c485da490d6"} Sep 30 13:48:36 crc kubenswrapper[4783]: I0930 13:48:36.853173 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5086fa0f-c984-468d-ac1e-2fff31540c31" path="/var/lib/kubelet/pods/5086fa0f-c984-468d-ac1e-2fff31540c31/volumes" Sep 30 13:48:37 crc kubenswrapper[4783]: I0930 13:48:37.441706 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:37 crc kubenswrapper[4783]: I0930 13:48:37.441778 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:37 crc kubenswrapper[4783]: 
I0930 13:48:37.511379 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:37 crc kubenswrapper[4783]: I0930 13:48:37.674295 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:48:37 crc kubenswrapper[4783]: I0930 13:48:37.674364 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:48:37 crc kubenswrapper[4783]: I0930 13:48:37.674411 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:48:37 crc kubenswrapper[4783]: I0930 13:48:37.675059 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f5f4180b203e0bfb30a34850cbda7cc99e36cdce653441931fda65fe6cf8a779"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 13:48:37 crc kubenswrapper[4783]: I0930 13:48:37.675129 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://f5f4180b203e0bfb30a34850cbda7cc99e36cdce653441931fda65fe6cf8a779" gracePeriod=600 Sep 30 13:48:37 crc kubenswrapper[4783]: I0930 13:48:37.718745 4783 generic.go:334] "Generic (PLEG): container finished" podID="6619aa41-665c-4e5e-bbbd-be79c4c2db9f" containerID="050f35cd4e9ebe6e7e2fd459af5fde5fadb83f0cc1c3ff7eb1bda81c8c83ddd1" exitCode=0 Sep 30 13:48:37 crc kubenswrapper[4783]: I0930 13:48:37.718865 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" event={"ID":"6619aa41-665c-4e5e-bbbd-be79c4c2db9f","Type":"ContainerDied","Data":"050f35cd4e9ebe6e7e2fd459af5fde5fadb83f0cc1c3ff7eb1bda81c8c83ddd1"} Sep 30 13:48:37 crc kubenswrapper[4783]: I0930 13:48:37.777407 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:38 crc kubenswrapper[4783]: I0930 13:48:38.727359 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="f5f4180b203e0bfb30a34850cbda7cc99e36cdce653441931fda65fe6cf8a779" exitCode=0 Sep 30 13:48:38 crc kubenswrapper[4783]: I0930 13:48:38.727420 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"f5f4180b203e0bfb30a34850cbda7cc99e36cdce653441931fda65fe6cf8a779"} Sep 30 13:48:38 crc kubenswrapper[4783]: I0930 13:48:38.728263 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" 
event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"6af773030061195dc9f5fe7c2469df68133a2624856a154eaac950572277b0cc"} Sep 30 13:48:38 crc kubenswrapper[4783]: I0930 13:48:38.728289 4783 scope.go:117] "RemoveContainer" containerID="124a8ad49c667c4b9b3a7d3c02a461dd34a53bfe6b1a15c17af1b4401c60ef79" Sep 30 13:48:39 crc kubenswrapper[4783]: I0930 13:48:39.053872 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" Sep 30 13:48:39 crc kubenswrapper[4783]: I0930 13:48:39.167405 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzgjz\" (UniqueName: \"kubernetes.io/projected/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-kube-api-access-nzgjz\") pod \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\" (UID: \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\") " Sep 30 13:48:39 crc kubenswrapper[4783]: I0930 13:48:39.167574 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-util\") pod \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\" (UID: \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\") " Sep 30 13:48:39 crc kubenswrapper[4783]: I0930 13:48:39.167805 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-bundle\") pod \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\" (UID: \"6619aa41-665c-4e5e-bbbd-be79c4c2db9f\") " Sep 30 13:48:39 crc kubenswrapper[4783]: I0930 13:48:39.170034 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-bundle" (OuterVolumeSpecName: "bundle") pod "6619aa41-665c-4e5e-bbbd-be79c4c2db9f" (UID: "6619aa41-665c-4e5e-bbbd-be79c4c2db9f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:48:39 crc kubenswrapper[4783]: I0930 13:48:39.176695 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-kube-api-access-nzgjz" (OuterVolumeSpecName: "kube-api-access-nzgjz") pod "6619aa41-665c-4e5e-bbbd-be79c4c2db9f" (UID: "6619aa41-665c-4e5e-bbbd-be79c4c2db9f"). InnerVolumeSpecName "kube-api-access-nzgjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:48:39 crc kubenswrapper[4783]: I0930 13:48:39.191026 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-util" (OuterVolumeSpecName: "util") pod "6619aa41-665c-4e5e-bbbd-be79c4c2db9f" (UID: "6619aa41-665c-4e5e-bbbd-be79c4c2db9f"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:48:39 crc kubenswrapper[4783]: I0930 13:48:39.270065 4783 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:39 crc kubenswrapper[4783]: I0930 13:48:39.270115 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzgjz\" (UniqueName: \"kubernetes.io/projected/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-kube-api-access-nzgjz\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:39 crc kubenswrapper[4783]: I0930 13:48:39.270135 4783 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6619aa41-665c-4e5e-bbbd-be79c4c2db9f-util\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:39 crc kubenswrapper[4783]: I0930 13:48:39.745071 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" event={"ID":"6619aa41-665c-4e5e-bbbd-be79c4c2db9f","Type":"ContainerDied","Data":"666db0dad1608f0f0021fa738248e563f02b992050a7f0a076ae44de585f68a7"} Sep 30 13:48:39 crc kubenswrapper[4783]: I0930 13:48:39.745538 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="666db0dad1608f0f0021fa738248e563f02b992050a7f0a076ae44de585f68a7" Sep 30 13:48:39 crc kubenswrapper[4783]: I0930 13:48:39.745159 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp" Sep 30 13:48:43 crc kubenswrapper[4783]: I0930 13:48:43.700188 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w7g7m"] Sep 30 13:48:43 crc kubenswrapper[4783]: I0930 13:48:43.701019 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-w7g7m" podUID="ffa55fbb-dd3f-4519-9d2e-88aab68af48e" containerName="registry-server" containerID="cri-o://bfed4a52aad166556116408e3951bf979ae58d1f3ad36fc9b74439300f93b9ed" gracePeriod=2 Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.113435 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6hjsm"] Sep 30 13:48:44 crc kubenswrapper[4783]: E0930 13:48:44.114060 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6619aa41-665c-4e5e-bbbd-be79c4c2db9f" containerName="extract" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.114126 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6619aa41-665c-4e5e-bbbd-be79c4c2db9f" containerName="extract" Sep 30 13:48:44 crc kubenswrapper[4783]: E0930 13:48:44.114179 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6619aa41-665c-4e5e-bbbd-be79c4c2db9f" containerName="util" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.114239 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6619aa41-665c-4e5e-bbbd-be79c4c2db9f" containerName="util" Sep 30 13:48:44 crc kubenswrapper[4783]: E0930 13:48:44.114294 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5086fa0f-c984-468d-ac1e-2fff31540c31" containerName="registry-server" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.114343 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5086fa0f-c984-468d-ac1e-2fff31540c31" containerName="registry-server" Sep 30 13:48:44 crc 
kubenswrapper[4783]: E0930 13:48:44.114393 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6619aa41-665c-4e5e-bbbd-be79c4c2db9f" containerName="pull" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.114447 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6619aa41-665c-4e5e-bbbd-be79c4c2db9f" containerName="pull" Sep 30 13:48:44 crc kubenswrapper[4783]: E0930 13:48:44.114502 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5086fa0f-c984-468d-ac1e-2fff31540c31" containerName="extract-content" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.114558 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5086fa0f-c984-468d-ac1e-2fff31540c31" containerName="extract-content" Sep 30 13:48:44 crc kubenswrapper[4783]: E0930 13:48:44.114608 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5086fa0f-c984-468d-ac1e-2fff31540c31" containerName="extract-utilities" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.114653 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5086fa0f-c984-468d-ac1e-2fff31540c31" containerName="extract-utilities" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.114803 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="5086fa0f-c984-468d-ac1e-2fff31540c31" containerName="registry-server" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.114867 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6619aa41-665c-4e5e-bbbd-be79c4c2db9f" containerName="extract" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.115609 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.130052 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6hjsm"] Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.191431 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.249194 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pscsp\" (UniqueName: \"kubernetes.io/projected/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-kube-api-access-pscsp\") pod \"redhat-marketplace-6hjsm\" (UID: \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\") " pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.249288 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-utilities\") pod \"redhat-marketplace-6hjsm\" (UID: \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\") " pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.249493 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-catalog-content\") pod \"redhat-marketplace-6hjsm\" (UID: \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\") " pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.351150 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-utilities\") pod \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\" (UID: \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\") " Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.351565 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-catalog-content\") pod \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\" (UID: \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\") " Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.351840 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsxkd\" (UniqueName: \"kubernetes.io/projected/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-kube-api-access-nsxkd\") pod \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\" (UID: \"ffa55fbb-dd3f-4519-9d2e-88aab68af48e\") " Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.352250 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-catalog-content\") pod \"redhat-marketplace-6hjsm\" (UID: \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\") " pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.352502 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pscsp\" (UniqueName: \"kubernetes.io/projected/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-kube-api-access-pscsp\") pod \"redhat-marketplace-6hjsm\" (UID: \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\") " pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.352658 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-utilities\") pod \"redhat-marketplace-6hjsm\" (UID: \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\") " 
pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.352842 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-catalog-content\") pod \"redhat-marketplace-6hjsm\" (UID: \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\") " pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.352992 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-utilities" (OuterVolumeSpecName: "utilities") pod "ffa55fbb-dd3f-4519-9d2e-88aab68af48e" (UID: "ffa55fbb-dd3f-4519-9d2e-88aab68af48e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.353543 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-utilities\") pod \"redhat-marketplace-6hjsm\" (UID: \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\") " pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.360595 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-kube-api-access-nsxkd" (OuterVolumeSpecName: "kube-api-access-nsxkd") pod "ffa55fbb-dd3f-4519-9d2e-88aab68af48e" (UID: "ffa55fbb-dd3f-4519-9d2e-88aab68af48e"). InnerVolumeSpecName "kube-api-access-nsxkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.391992 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pscsp\" (UniqueName: \"kubernetes.io/projected/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-kube-api-access-pscsp\") pod \"redhat-marketplace-6hjsm\" (UID: \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\") " pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.417488 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ffa55fbb-dd3f-4519-9d2e-88aab68af48e" (UID: "ffa55fbb-dd3f-4519-9d2e-88aab68af48e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.454500 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.454550 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.454562 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsxkd\" (UniqueName: \"kubernetes.io/projected/ffa55fbb-dd3f-4519-9d2e-88aab68af48e-kube-api-access-nsxkd\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.491940 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.775795 4783 generic.go:334] "Generic (PLEG): container finished" podID="ffa55fbb-dd3f-4519-9d2e-88aab68af48e" containerID="bfed4a52aad166556116408e3951bf979ae58d1f3ad36fc9b74439300f93b9ed" exitCode=0 Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.775835 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w7g7m" event={"ID":"ffa55fbb-dd3f-4519-9d2e-88aab68af48e","Type":"ContainerDied","Data":"bfed4a52aad166556116408e3951bf979ae58d1f3ad36fc9b74439300f93b9ed"} Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.775859 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w7g7m" event={"ID":"ffa55fbb-dd3f-4519-9d2e-88aab68af48e","Type":"ContainerDied","Data":"8523614bc3a9e89281682becb81068c266f183148dc378e994f38b71c444902d"} Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.775875 4783 scope.go:117] "RemoveContainer" containerID="bfed4a52aad166556116408e3951bf979ae58d1f3ad36fc9b74439300f93b9ed" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.775972 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w7g7m" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.795733 4783 scope.go:117] "RemoveContainer" containerID="c90bf8d31ad0b36787f0329f3a852152d4077cc7bb69d585fb62e99be1cfad16" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.806484 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w7g7m"] Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.808211 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-w7g7m"] Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.833456 4783 scope.go:117] "RemoveContainer" containerID="0459ef7e87b07aca9b3f516d8ab4b41d9da347db4319fc277860760b22b2d6c8" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.849174 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffa55fbb-dd3f-4519-9d2e-88aab68af48e" path="/var/lib/kubelet/pods/ffa55fbb-dd3f-4519-9d2e-88aab68af48e/volumes" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.859190 4783 scope.go:117] "RemoveContainer" containerID="bfed4a52aad166556116408e3951bf979ae58d1f3ad36fc9b74439300f93b9ed" Sep 30 13:48:44 crc kubenswrapper[4783]: E0930 13:48:44.860021 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfed4a52aad166556116408e3951bf979ae58d1f3ad36fc9b74439300f93b9ed\": container with ID starting with bfed4a52aad166556116408e3951bf979ae58d1f3ad36fc9b74439300f93b9ed not found: ID does not exist" containerID="bfed4a52aad166556116408e3951bf979ae58d1f3ad36fc9b74439300f93b9ed" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.860073 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfed4a52aad166556116408e3951bf979ae58d1f3ad36fc9b74439300f93b9ed"} err="failed to get container status \"bfed4a52aad166556116408e3951bf979ae58d1f3ad36fc9b74439300f93b9ed\": rpc error: code = NotFound desc = could not find container \"bfed4a52aad166556116408e3951bf979ae58d1f3ad36fc9b74439300f93b9ed\": container with ID starting with bfed4a52aad166556116408e3951bf979ae58d1f3ad36fc9b74439300f93b9ed not found: ID does not exist" Sep 30 
13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.860112 4783 scope.go:117] "RemoveContainer" containerID="c90bf8d31ad0b36787f0329f3a852152d4077cc7bb69d585fb62e99be1cfad16" Sep 30 13:48:44 crc kubenswrapper[4783]: E0930 13:48:44.860443 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c90bf8d31ad0b36787f0329f3a852152d4077cc7bb69d585fb62e99be1cfad16\": container with ID starting with c90bf8d31ad0b36787f0329f3a852152d4077cc7bb69d585fb62e99be1cfad16 not found: ID does not exist" containerID="c90bf8d31ad0b36787f0329f3a852152d4077cc7bb69d585fb62e99be1cfad16" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.860474 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c90bf8d31ad0b36787f0329f3a852152d4077cc7bb69d585fb62e99be1cfad16"} err="failed to get container status \"c90bf8d31ad0b36787f0329f3a852152d4077cc7bb69d585fb62e99be1cfad16\": rpc error: code = NotFound desc = could not find container \"c90bf8d31ad0b36787f0329f3a852152d4077cc7bb69d585fb62e99be1cfad16\": container with ID starting with c90bf8d31ad0b36787f0329f3a852152d4077cc7bb69d585fb62e99be1cfad16 not found: ID does not exist" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.860500 4783 scope.go:117] "RemoveContainer" containerID="0459ef7e87b07aca9b3f516d8ab4b41d9da347db4319fc277860760b22b2d6c8" Sep 30 13:48:44 crc kubenswrapper[4783]: E0930 13:48:44.861071 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0459ef7e87b07aca9b3f516d8ab4b41d9da347db4319fc277860760b22b2d6c8\": container with ID starting with 0459ef7e87b07aca9b3f516d8ab4b41d9da347db4319fc277860760b22b2d6c8 not found: ID does not exist" containerID="0459ef7e87b07aca9b3f516d8ab4b41d9da347db4319fc277860760b22b2d6c8" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.861098 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0459ef7e87b07aca9b3f516d8ab4b41d9da347db4319fc277860760b22b2d6c8"} err="failed to get container status \"0459ef7e87b07aca9b3f516d8ab4b41d9da347db4319fc277860760b22b2d6c8\": rpc error: code = NotFound desc = could not find container \"0459ef7e87b07aca9b3f516d8ab4b41d9da347db4319fc277860760b22b2d6c8\": container with ID starting with 0459ef7e87b07aca9b3f516d8ab4b41d9da347db4319fc277860760b22b2d6c8 not found: ID does not exist" Sep 30 13:48:44 crc kubenswrapper[4783]: I0930 13:48:44.973306 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6hjsm"] Sep 30 13:48:44 crc kubenswrapper[4783]: W0930 13:48:44.979250 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3f8d1b5_d2f6_4a32_aeeb_739f4230acdb.slice/crio-75611faaa4d59ad1ca75766fecc32992b34385ddef2963644a139cb7050d526f WatchSource:0}: Error finding container 75611faaa4d59ad1ca75766fecc32992b34385ddef2963644a139cb7050d526f: Status 404 returned error can't find the container with id 75611faaa4d59ad1ca75766fecc32992b34385ddef2963644a139cb7050d526f Sep 30 13:48:45 crc kubenswrapper[4783]: I0930 13:48:45.794044 4783 generic.go:334] "Generic (PLEG): container finished" podID="e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" containerID="0b290c16028f2c6245d78204b10bd0794304705bf64ae0576727e642b9f87c7b" exitCode=0 Sep 30 13:48:45 crc kubenswrapper[4783]: I0930 13:48:45.794109 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-6hjsm" event={"ID":"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb","Type":"ContainerDied","Data":"0b290c16028f2c6245d78204b10bd0794304705bf64ae0576727e642b9f87c7b"} Sep 30 13:48:45 crc kubenswrapper[4783]: I0930 13:48:45.794201 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6hjsm" event={"ID":"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb","Type":"ContainerStarted","Data":"75611faaa4d59ad1ca75766fecc32992b34385ddef2963644a139cb7050d526f"} Sep 30 13:48:47 crc kubenswrapper[4783]: I0930 13:48:47.816118 4783 generic.go:334] "Generic (PLEG): container finished" podID="e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" containerID="aacaaed5398429972a5b714dd810b351e797637cc52d344b31eaf19c115f713c" exitCode=0 Sep 30 13:48:47 crc kubenswrapper[4783]: I0930 13:48:47.816152 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6hjsm" event={"ID":"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb","Type":"ContainerDied","Data":"aacaaed5398429972a5b714dd810b351e797637cc52d344b31eaf19c115f713c"} Sep 30 13:48:48 crc kubenswrapper[4783]: I0930 13:48:48.824640 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6hjsm" event={"ID":"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb","Type":"ContainerStarted","Data":"1e4b435b0af50e627cb8308850a75f3f8bf776ac8daba882d10f2dad4fb59e57"} Sep 30 13:48:48 crc kubenswrapper[4783]: I0930 13:48:48.844407 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6hjsm" podStartSLOduration=2.380776554 podStartE2EDuration="4.844392095s" podCreationTimestamp="2025-09-30 13:48:44 +0000 UTC" firstStartedPulling="2025-09-30 13:48:45.798354719 +0000 UTC m=+825.729821026" lastFinishedPulling="2025-09-30 13:48:48.26197026 +0000 UTC m=+828.193436567" observedRunningTime="2025-09-30 13:48:48.844108086 +0000 UTC m=+828.775574403" watchObservedRunningTime="2025-09-30 13:48:48.844392095 +0000 UTC m=+828.775858402" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.442182 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd"] Sep 30 13:48:49 crc kubenswrapper[4783]: E0930 13:48:49.442396 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffa55fbb-dd3f-4519-9d2e-88aab68af48e" containerName="registry-server" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.442408 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffa55fbb-dd3f-4519-9d2e-88aab68af48e" containerName="registry-server" Sep 30 13:48:49 crc kubenswrapper[4783]: E0930 13:48:49.442423 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffa55fbb-dd3f-4519-9d2e-88aab68af48e" containerName="extract-utilities" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.442429 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffa55fbb-dd3f-4519-9d2e-88aab68af48e" containerName="extract-utilities" Sep 30 13:48:49 crc kubenswrapper[4783]: E0930 13:48:49.442437 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffa55fbb-dd3f-4519-9d2e-88aab68af48e" containerName="extract-content" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.442443 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffa55fbb-dd3f-4519-9d2e-88aab68af48e" containerName="extract-content" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.442535 4783 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="ffa55fbb-dd3f-4519-9d2e-88aab68af48e" containerName="registry-server" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.442897 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.444609 4783 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.444877 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.445082 4783 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-qp6bw" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.446307 4783 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.447688 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.465522 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd"] Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.615955 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gnch\" (UniqueName: \"kubernetes.io/projected/81fee3e2-b4bc-46a3-85dc-3a15c11a1620-kube-api-access-9gnch\") pod \"metallb-operator-controller-manager-6d6c5dbbcc-sqqgd\" (UID: \"81fee3e2-b4bc-46a3-85dc-3a15c11a1620\") " pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.616070 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/81fee3e2-b4bc-46a3-85dc-3a15c11a1620-apiservice-cert\") pod \"metallb-operator-controller-manager-6d6c5dbbcc-sqqgd\" (UID: \"81fee3e2-b4bc-46a3-85dc-3a15c11a1620\") " pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.616358 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/81fee3e2-b4bc-46a3-85dc-3a15c11a1620-webhook-cert\") pod \"metallb-operator-controller-manager-6d6c5dbbcc-sqqgd\" (UID: \"81fee3e2-b4bc-46a3-85dc-3a15c11a1620\") " pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.672326 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp"] Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.672994 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.674559 4783 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.674698 4783 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-sj8sh" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.675150 4783 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.687293 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp"] Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.730572 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gnch\" (UniqueName: \"kubernetes.io/projected/81fee3e2-b4bc-46a3-85dc-3a15c11a1620-kube-api-access-9gnch\") pod \"metallb-operator-controller-manager-6d6c5dbbcc-sqqgd\" (UID: \"81fee3e2-b4bc-46a3-85dc-3a15c11a1620\") " pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.730655 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/81fee3e2-b4bc-46a3-85dc-3a15c11a1620-apiservice-cert\") pod \"metallb-operator-controller-manager-6d6c5dbbcc-sqqgd\" (UID: \"81fee3e2-b4bc-46a3-85dc-3a15c11a1620\") " pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.730702 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/81fee3e2-b4bc-46a3-85dc-3a15c11a1620-webhook-cert\") pod \"metallb-operator-controller-manager-6d6c5dbbcc-sqqgd\" (UID: \"81fee3e2-b4bc-46a3-85dc-3a15c11a1620\") " pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.736050 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/81fee3e2-b4bc-46a3-85dc-3a15c11a1620-apiservice-cert\") pod \"metallb-operator-controller-manager-6d6c5dbbcc-sqqgd\" (UID: \"81fee3e2-b4bc-46a3-85dc-3a15c11a1620\") " pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.736072 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/81fee3e2-b4bc-46a3-85dc-3a15c11a1620-webhook-cert\") pod \"metallb-operator-controller-manager-6d6c5dbbcc-sqqgd\" (UID: \"81fee3e2-b4bc-46a3-85dc-3a15c11a1620\") " pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.756289 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gnch\" (UniqueName: \"kubernetes.io/projected/81fee3e2-b4bc-46a3-85dc-3a15c11a1620-kube-api-access-9gnch\") pod \"metallb-operator-controller-manager-6d6c5dbbcc-sqqgd\" (UID: \"81fee3e2-b4bc-46a3-85dc-3a15c11a1620\") " pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.756976 4783 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.836268 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/67f22223-c5d5-43af-9c5c-0791aec426e7-apiservice-cert\") pod \"metallb-operator-webhook-server-c6c769cb9-27kpp\" (UID: \"67f22223-c5d5-43af-9c5c-0791aec426e7\") " pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.842454 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/67f22223-c5d5-43af-9c5c-0791aec426e7-webhook-cert\") pod \"metallb-operator-webhook-server-c6c769cb9-27kpp\" (UID: \"67f22223-c5d5-43af-9c5c-0791aec426e7\") " pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.842974 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qdfb\" (UniqueName: \"kubernetes.io/projected/67f22223-c5d5-43af-9c5c-0791aec426e7-kube-api-access-2qdfb\") pod \"metallb-operator-webhook-server-c6c769cb9-27kpp\" (UID: \"67f22223-c5d5-43af-9c5c-0791aec426e7\") " pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.944774 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/67f22223-c5d5-43af-9c5c-0791aec426e7-apiservice-cert\") pod \"metallb-operator-webhook-server-c6c769cb9-27kpp\" (UID: \"67f22223-c5d5-43af-9c5c-0791aec426e7\") " pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.944870 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/67f22223-c5d5-43af-9c5c-0791aec426e7-webhook-cert\") pod \"metallb-operator-webhook-server-c6c769cb9-27kpp\" (UID: \"67f22223-c5d5-43af-9c5c-0791aec426e7\") " pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.944918 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qdfb\" (UniqueName: \"kubernetes.io/projected/67f22223-c5d5-43af-9c5c-0791aec426e7-kube-api-access-2qdfb\") pod \"metallb-operator-webhook-server-c6c769cb9-27kpp\" (UID: \"67f22223-c5d5-43af-9c5c-0791aec426e7\") " pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.951777 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/67f22223-c5d5-43af-9c5c-0791aec426e7-webhook-cert\") pod \"metallb-operator-webhook-server-c6c769cb9-27kpp\" (UID: \"67f22223-c5d5-43af-9c5c-0791aec426e7\") " pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.954945 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/67f22223-c5d5-43af-9c5c-0791aec426e7-apiservice-cert\") pod \"metallb-operator-webhook-server-c6c769cb9-27kpp\" (UID: \"67f22223-c5d5-43af-9c5c-0791aec426e7\") " 
pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.965791 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qdfb\" (UniqueName: \"kubernetes.io/projected/67f22223-c5d5-43af-9c5c-0791aec426e7-kube-api-access-2qdfb\") pod \"metallb-operator-webhook-server-c6c769cb9-27kpp\" (UID: \"67f22223-c5d5-43af-9c5c-0791aec426e7\") " pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" Sep 30 13:48:49 crc kubenswrapper[4783]: I0930 13:48:49.985434 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" Sep 30 13:48:50 crc kubenswrapper[4783]: I0930 13:48:50.183707 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd"] Sep 30 13:48:50 crc kubenswrapper[4783]: W0930 13:48:50.199767 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81fee3e2_b4bc_46a3_85dc_3a15c11a1620.slice/crio-a97bd805eb432e6d43c484bba256cbc9e8e870c81a3a96e73dd622febbb3b1b6 WatchSource:0}: Error finding container a97bd805eb432e6d43c484bba256cbc9e8e870c81a3a96e73dd622febbb3b1b6: Status 404 returned error can't find the container with id a97bd805eb432e6d43c484bba256cbc9e8e870c81a3a96e73dd622febbb3b1b6 Sep 30 13:48:50 crc kubenswrapper[4783]: I0930 13:48:50.493742 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp"] Sep 30 13:48:50 crc kubenswrapper[4783]: W0930 13:48:50.494547 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67f22223_c5d5_43af_9c5c_0791aec426e7.slice/crio-a2d4f013541666e664f5f83dacc7e3ff8e50f21c0102a15f83bab0cc31a6e683 WatchSource:0}: Error finding container a2d4f013541666e664f5f83dacc7e3ff8e50f21c0102a15f83bab0cc31a6e683: Status 404 returned error can't find the container with id a2d4f013541666e664f5f83dacc7e3ff8e50f21c0102a15f83bab0cc31a6e683 Sep 30 13:48:50 crc kubenswrapper[4783]: I0930 13:48:50.835958 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" event={"ID":"67f22223-c5d5-43af-9c5c-0791aec426e7","Type":"ContainerStarted","Data":"a2d4f013541666e664f5f83dacc7e3ff8e50f21c0102a15f83bab0cc31a6e683"} Sep 30 13:48:50 crc kubenswrapper[4783]: I0930 13:48:50.837399 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" event={"ID":"81fee3e2-b4bc-46a3-85dc-3a15c11a1620","Type":"ContainerStarted","Data":"a97bd805eb432e6d43c484bba256cbc9e8e870c81a3a96e73dd622febbb3b1b6"} Sep 30 13:48:54 crc kubenswrapper[4783]: I0930 13:48:54.493118 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:54 crc kubenswrapper[4783]: I0930 13:48:54.493493 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:54 crc kubenswrapper[4783]: I0930 13:48:54.544795 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:54 crc kubenswrapper[4783]: I0930 13:48:54.859105 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" event={"ID":"81fee3e2-b4bc-46a3-85dc-3a15c11a1620","Type":"ContainerStarted","Data":"ef99ba34502c86126cfd0214f9b6a6307f5d1cb818ec451845624e3fd4a0042b"} Sep 30 13:48:54 crc kubenswrapper[4783]: I0930 13:48:54.859460 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" Sep 30 13:48:54 crc kubenswrapper[4783]: I0930 13:48:54.860608 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" event={"ID":"67f22223-c5d5-43af-9c5c-0791aec426e7","Type":"ContainerStarted","Data":"4b208f186562c4848fd031bd400bce84f39b24020a12831513c99f6592280f70"} Sep 30 13:48:54 crc kubenswrapper[4783]: I0930 13:48:54.861042 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" Sep 30 13:48:54 crc kubenswrapper[4783]: I0930 13:48:54.882155 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" podStartSLOduration=1.620309211 podStartE2EDuration="5.882134607s" podCreationTimestamp="2025-09-30 13:48:49 +0000 UTC" firstStartedPulling="2025-09-30 13:48:50.203338731 +0000 UTC m=+830.134805048" lastFinishedPulling="2025-09-30 13:48:54.465164137 +0000 UTC m=+834.396630444" observedRunningTime="2025-09-30 13:48:54.879088659 +0000 UTC m=+834.810554986" watchObservedRunningTime="2025-09-30 13:48:54.882134607 +0000 UTC m=+834.813600934" Sep 30 13:48:54 crc kubenswrapper[4783]: I0930 13:48:54.901828 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" podStartSLOduration=1.8547614019999998 podStartE2EDuration="5.901805742s" podCreationTimestamp="2025-09-30 13:48:49 +0000 UTC" firstStartedPulling="2025-09-30 13:48:50.500382044 +0000 UTC m=+830.431848351" lastFinishedPulling="2025-09-30 13:48:54.547426384 +0000 UTC m=+834.478892691" observedRunningTime="2025-09-30 13:48:54.900468819 +0000 UTC m=+834.831935136" watchObservedRunningTime="2025-09-30 13:48:54.901805742 +0000 UTC m=+834.833272049" Sep 30 13:48:54 crc kubenswrapper[4783]: I0930 13:48:54.920533 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:55 crc kubenswrapper[4783]: I0930 13:48:55.697051 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6hjsm"] Sep 30 13:48:56 crc kubenswrapper[4783]: I0930 13:48:56.871959 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6hjsm" podUID="e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" containerName="registry-server" containerID="cri-o://1e4b435b0af50e627cb8308850a75f3f8bf776ac8daba882d10f2dad4fb59e57" gracePeriod=2 Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.348809 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.544651 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-utilities\") pod \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\" (UID: \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\") " Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.544695 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pscsp\" (UniqueName: \"kubernetes.io/projected/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-kube-api-access-pscsp\") pod \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\" (UID: \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\") " Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.544720 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-catalog-content\") pod \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\" (UID: \"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb\") " Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.545614 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-utilities" (OuterVolumeSpecName: "utilities") pod "e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" (UID: "e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.550018 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-kube-api-access-pscsp" (OuterVolumeSpecName: "kube-api-access-pscsp") pod "e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" (UID: "e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb"). InnerVolumeSpecName "kube-api-access-pscsp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.556152 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" (UID: "e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.646172 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pscsp\" (UniqueName: \"kubernetes.io/projected/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-kube-api-access-pscsp\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.646211 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.646241 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.878190 4783 generic.go:334] "Generic (PLEG): container finished" podID="e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" containerID="1e4b435b0af50e627cb8308850a75f3f8bf776ac8daba882d10f2dad4fb59e57" exitCode=0 Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.878238 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6hjsm" event={"ID":"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb","Type":"ContainerDied","Data":"1e4b435b0af50e627cb8308850a75f3f8bf776ac8daba882d10f2dad4fb59e57"} Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.878488 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6hjsm" event={"ID":"e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb","Type":"ContainerDied","Data":"75611faaa4d59ad1ca75766fecc32992b34385ddef2963644a139cb7050d526f"} Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.878252 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6hjsm" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.878505 4783 scope.go:117] "RemoveContainer" containerID="1e4b435b0af50e627cb8308850a75f3f8bf776ac8daba882d10f2dad4fb59e57" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.893290 4783 scope.go:117] "RemoveContainer" containerID="aacaaed5398429972a5b714dd810b351e797637cc52d344b31eaf19c115f713c" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.907494 4783 scope.go:117] "RemoveContainer" containerID="0b290c16028f2c6245d78204b10bd0794304705bf64ae0576727e642b9f87c7b" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.919365 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6hjsm"] Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.926837 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6hjsm"] Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.944172 4783 scope.go:117] "RemoveContainer" containerID="1e4b435b0af50e627cb8308850a75f3f8bf776ac8daba882d10f2dad4fb59e57" Sep 30 13:48:57 crc kubenswrapper[4783]: E0930 13:48:57.946741 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e4b435b0af50e627cb8308850a75f3f8bf776ac8daba882d10f2dad4fb59e57\": container with ID starting with 1e4b435b0af50e627cb8308850a75f3f8bf776ac8daba882d10f2dad4fb59e57 not found: ID does not exist" containerID="1e4b435b0af50e627cb8308850a75f3f8bf776ac8daba882d10f2dad4fb59e57" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.946792 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e4b435b0af50e627cb8308850a75f3f8bf776ac8daba882d10f2dad4fb59e57"} err="failed to get container status \"1e4b435b0af50e627cb8308850a75f3f8bf776ac8daba882d10f2dad4fb59e57\": rpc error: code = NotFound desc = could not find container \"1e4b435b0af50e627cb8308850a75f3f8bf776ac8daba882d10f2dad4fb59e57\": container with ID starting with 1e4b435b0af50e627cb8308850a75f3f8bf776ac8daba882d10f2dad4fb59e57 not found: ID does not exist" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.946820 4783 scope.go:117] "RemoveContainer" containerID="aacaaed5398429972a5b714dd810b351e797637cc52d344b31eaf19c115f713c" Sep 30 13:48:57 crc kubenswrapper[4783]: E0930 13:48:57.947125 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aacaaed5398429972a5b714dd810b351e797637cc52d344b31eaf19c115f713c\": container with ID starting with aacaaed5398429972a5b714dd810b351e797637cc52d344b31eaf19c115f713c not found: ID does not exist" containerID="aacaaed5398429972a5b714dd810b351e797637cc52d344b31eaf19c115f713c" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.947159 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aacaaed5398429972a5b714dd810b351e797637cc52d344b31eaf19c115f713c"} err="failed to get container status \"aacaaed5398429972a5b714dd810b351e797637cc52d344b31eaf19c115f713c\": rpc error: code = NotFound desc = could not find container \"aacaaed5398429972a5b714dd810b351e797637cc52d344b31eaf19c115f713c\": container with ID starting with aacaaed5398429972a5b714dd810b351e797637cc52d344b31eaf19c115f713c not found: ID does not exist" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.947185 4783 scope.go:117] "RemoveContainer" 
containerID="0b290c16028f2c6245d78204b10bd0794304705bf64ae0576727e642b9f87c7b" Sep 30 13:48:57 crc kubenswrapper[4783]: E0930 13:48:57.947473 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b290c16028f2c6245d78204b10bd0794304705bf64ae0576727e642b9f87c7b\": container with ID starting with 0b290c16028f2c6245d78204b10bd0794304705bf64ae0576727e642b9f87c7b not found: ID does not exist" containerID="0b290c16028f2c6245d78204b10bd0794304705bf64ae0576727e642b9f87c7b" Sep 30 13:48:57 crc kubenswrapper[4783]: I0930 13:48:57.947507 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b290c16028f2c6245d78204b10bd0794304705bf64ae0576727e642b9f87c7b"} err="failed to get container status \"0b290c16028f2c6245d78204b10bd0794304705bf64ae0576727e642b9f87c7b\": rpc error: code = NotFound desc = could not find container \"0b290c16028f2c6245d78204b10bd0794304705bf64ae0576727e642b9f87c7b\": container with ID starting with 0b290c16028f2c6245d78204b10bd0794304705bf64ae0576727e642b9f87c7b not found: ID does not exist" Sep 30 13:48:58 crc kubenswrapper[4783]: I0930 13:48:58.851124 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" path="/var/lib/kubelet/pods/e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb/volumes" Sep 30 13:49:09 crc kubenswrapper[4783]: I0930 13:49:09.996351 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-c6c769cb9-27kpp" Sep 30 13:49:29 crc kubenswrapper[4783]: I0930 13:49:29.761008 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-6d6c5dbbcc-sqqgd" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.626681 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-z6csc"] Sep 30 13:49:30 crc kubenswrapper[4783]: E0930 13:49:30.626909 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" containerName="extract-content" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.626922 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" containerName="extract-content" Sep 30 13:49:30 crc kubenswrapper[4783]: E0930 13:49:30.626939 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" containerName="extract-utilities" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.626945 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" containerName="extract-utilities" Sep 30 13:49:30 crc kubenswrapper[4783]: E0930 13:49:30.626956 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" containerName="registry-server" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.626962 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" containerName="registry-server" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.627061 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3f8d1b5-d2f6-4a32-aeeb-739f4230acdb" containerName="registry-server" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.628783 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.631392 4783 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.631397 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.631583 4783 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-g4jjb" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.632515 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2"] Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.633157 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.634280 4783 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.659323 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2"] Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.711939 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-sfsjm"] Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.712755 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-sfsjm" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.715085 4783 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.715454 4783 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.715525 4783 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-j9rr9" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.715689 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.732784 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-mxhr5"] Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.733640 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-mxhr5" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.735445 4783 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.761742 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-mxhr5"] Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.767428 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7faa07ce-5317-4817-a37d-66ec8ab6c2cd-cert\") pod \"controller-5d688f5ffc-mxhr5\" (UID: \"7faa07ce-5317-4817-a37d-66ec8ab6c2cd\") " pod="metallb-system/controller-5d688f5ffc-mxhr5" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.767478 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/26904722-3358-41d0-9485-379c77a69694-frr-conf\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.767504 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8lxg\" (UniqueName: \"kubernetes.io/projected/7faa07ce-5317-4817-a37d-66ec8ab6c2cd-kube-api-access-j8lxg\") pod \"controller-5d688f5ffc-mxhr5\" (UID: \"7faa07ce-5317-4817-a37d-66ec8ab6c2cd\") " pod="metallb-system/controller-5d688f5ffc-mxhr5" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.767521 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/26904722-3358-41d0-9485-379c77a69694-reloader\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.767539 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7faa07ce-5317-4817-a37d-66ec8ab6c2cd-metrics-certs\") pod \"controller-5d688f5ffc-mxhr5\" (UID: \"7faa07ce-5317-4817-a37d-66ec8ab6c2cd\") " pod="metallb-system/controller-5d688f5ffc-mxhr5" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.767570 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/26904722-3358-41d0-9485-379c77a69694-frr-sockets\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.767586 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-memberlist\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.767605 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/26904722-3358-41d0-9485-379c77a69694-frr-startup\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 
13:49:30.767622 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-metrics-certs\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.767638 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f7685957-e95a-4b5e-af72-6cede8277b41-cert\") pod \"frr-k8s-webhook-server-5478bdb765-zbpb2\" (UID: \"f7685957-e95a-4b5e-af72-6cede8277b41\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.767653 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwx54\" (UniqueName: \"kubernetes.io/projected/26904722-3358-41d0-9485-379c77a69694-kube-api-access-hwx54\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.767675 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28dkm\" (UniqueName: \"kubernetes.io/projected/f3657dc0-cffe-4309-ba77-ee5c025db0b5-kube-api-access-28dkm\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.767932 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/26904722-3358-41d0-9485-379c77a69694-metrics\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.767978 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/26904722-3358-41d0-9485-379c77a69694-metrics-certs\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.768006 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f3657dc0-cffe-4309-ba77-ee5c025db0b5-metallb-excludel2\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.768070 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44p4f\" (UniqueName: \"kubernetes.io/projected/f7685957-e95a-4b5e-af72-6cede8277b41-kube-api-access-44p4f\") pod \"frr-k8s-webhook-server-5478bdb765-zbpb2\" (UID: \"f7685957-e95a-4b5e-af72-6cede8277b41\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.868941 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8lxg\" (UniqueName: \"kubernetes.io/projected/7faa07ce-5317-4817-a37d-66ec8ab6c2cd-kube-api-access-j8lxg\") pod \"controller-5d688f5ffc-mxhr5\" (UID: \"7faa07ce-5317-4817-a37d-66ec8ab6c2cd\") " pod="metallb-system/controller-5d688f5ffc-mxhr5" Sep 30 13:49:30 crc 
kubenswrapper[4783]: I0930 13:49:30.868995 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/26904722-3358-41d0-9485-379c77a69694-reloader\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc"
Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.869037 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7faa07ce-5317-4817-a37d-66ec8ab6c2cd-metrics-certs\") pod \"controller-5d688f5ffc-mxhr5\" (UID: \"7faa07ce-5317-4817-a37d-66ec8ab6c2cd\") " pod="metallb-system/controller-5d688f5ffc-mxhr5"
Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.869073 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/26904722-3358-41d0-9485-379c77a69694-frr-sockets\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc"
Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.869095 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-memberlist\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm"
Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.869121 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/26904722-3358-41d0-9485-379c77a69694-frr-startup\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc"
Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.869144 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-metrics-certs\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm"
Sep 30 13:49:30 crc kubenswrapper[4783]: E0930 13:49:30.869170 4783 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found
Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.869620 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/26904722-3358-41d0-9485-379c77a69694-reloader\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc"
Sep 30 13:49:30 crc kubenswrapper[4783]: E0930 13:49:30.869251 4783 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Sep 30 13:49:30 crc kubenswrapper[4783]: E0930 13:49:30.869676 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7faa07ce-5317-4817-a37d-66ec8ab6c2cd-metrics-certs podName:7faa07ce-5317-4817-a37d-66ec8ab6c2cd nodeName:}" failed. No retries permitted until 2025-09-30 13:49:31.369649851 +0000 UTC m=+871.301116158 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7faa07ce-5317-4817-a37d-66ec8ab6c2cd-metrics-certs") pod "controller-5d688f5ffc-mxhr5" (UID: "7faa07ce-5317-4817-a37d-66ec8ab6c2cd") : secret "controller-certs-secret" not found
Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.869695 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/26904722-3358-41d0-9485-379c77a69694-frr-sockets\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc"
Sep 30 13:49:30 crc kubenswrapper[4783]: E0930 13:49:30.869383 4783 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found
Sep 30 13:49:30 crc kubenswrapper[4783]: E0930 13:49:30.869720 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-memberlist podName:f3657dc0-cffe-4309-ba77-ee5c025db0b5 nodeName:}" failed. No retries permitted until 2025-09-30 13:49:31.369701092 +0000 UTC m=+871.301167509 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-memberlist") pod "speaker-sfsjm" (UID: "f3657dc0-cffe-4309-ba77-ee5c025db0b5") : secret "metallb-memberlist" not found
Sep 30 13:49:30 crc kubenswrapper[4783]: E0930 13:49:30.869806 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-metrics-certs podName:f3657dc0-cffe-4309-ba77-ee5c025db0b5 nodeName:}" failed. No retries permitted until 2025-09-30 13:49:31.369785115 +0000 UTC m=+871.301251442 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-metrics-certs") pod "speaker-sfsjm" (UID: "f3657dc0-cffe-4309-ba77-ee5c025db0b5") : secret "speaker-certs-secret" not found
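The failed mounts above are parked with durationBeforeRetry 500ms, and when the secrets are still missing a second later the next attempt is parked with 1s: the volume manager backs off per volume until the referenced secret finally appears (the memberlist mount succeeds at 13:49:32 once metallb-memberlist exists). A small sketch of that cadence; the doubling is visible in this log, while the cap is an assumption for illustration:

package main

import (
	"fmt"
	"time"
)

// Models the retry schedule seen in the records: each failed
// MountVolume.SetUp defers the next attempt, doubling the delay.
func main() {
	delay := 500 * time.Millisecond
	const maxDelay = 2 * time.Minute // assumed cap, not taken from the log
	for attempt := 1; attempt <= 4; attempt++ {
		fmt.Printf("attempt %d failed: no retries permitted for %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}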
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-metrics-certs") pod "speaker-sfsjm" (UID: "f3657dc0-cffe-4309-ba77-ee5c025db0b5") : secret "speaker-certs-secret" not found Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.869848 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwx54\" (UniqueName: \"kubernetes.io/projected/26904722-3358-41d0-9485-379c77a69694-kube-api-access-hwx54\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.869874 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f7685957-e95a-4b5e-af72-6cede8277b41-cert\") pod \"frr-k8s-webhook-server-5478bdb765-zbpb2\" (UID: \"f7685957-e95a-4b5e-af72-6cede8277b41\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.869904 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28dkm\" (UniqueName: \"kubernetes.io/projected/f3657dc0-cffe-4309-ba77-ee5c025db0b5-kube-api-access-28dkm\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.870153 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/26904722-3358-41d0-9485-379c77a69694-metrics\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.870177 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/26904722-3358-41d0-9485-379c77a69694-metrics-certs\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.870093 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/26904722-3358-41d0-9485-379c77a69694-frr-startup\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.870200 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f3657dc0-cffe-4309-ba77-ee5c025db0b5-metallb-excludel2\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.870283 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44p4f\" (UniqueName: \"kubernetes.io/projected/f7685957-e95a-4b5e-af72-6cede8277b41-kube-api-access-44p4f\") pod \"frr-k8s-webhook-server-5478bdb765-zbpb2\" (UID: \"f7685957-e95a-4b5e-af72-6cede8277b41\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2" Sep 30 13:49:30 crc kubenswrapper[4783]: E0930 13:49:30.870314 4783 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.870322 4783 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/26904722-3358-41d0-9485-379c77a69694-frr-conf\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.870342 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7faa07ce-5317-4817-a37d-66ec8ab6c2cd-cert\") pod \"controller-5d688f5ffc-mxhr5\" (UID: \"7faa07ce-5317-4817-a37d-66ec8ab6c2cd\") " pod="metallb-system/controller-5d688f5ffc-mxhr5" Sep 30 13:49:30 crc kubenswrapper[4783]: E0930 13:49:30.870390 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/26904722-3358-41d0-9485-379c77a69694-metrics-certs podName:26904722-3358-41d0-9485-379c77a69694 nodeName:}" failed. No retries permitted until 2025-09-30 13:49:31.370375414 +0000 UTC m=+871.301841721 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/26904722-3358-41d0-9485-379c77a69694-metrics-certs") pod "frr-k8s-z6csc" (UID: "26904722-3358-41d0-9485-379c77a69694") : secret "frr-k8s-certs-secret" not found Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.870471 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/26904722-3358-41d0-9485-379c77a69694-metrics\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.870620 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/26904722-3358-41d0-9485-379c77a69694-frr-conf\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.871291 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f3657dc0-cffe-4309-ba77-ee5c025db0b5-metallb-excludel2\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.872396 4783 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.886944 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f7685957-e95a-4b5e-af72-6cede8277b41-cert\") pod \"frr-k8s-webhook-server-5478bdb765-zbpb2\" (UID: \"f7685957-e95a-4b5e-af72-6cede8277b41\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.889635 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7faa07ce-5317-4817-a37d-66ec8ab6c2cd-cert\") pod \"controller-5d688f5ffc-mxhr5\" (UID: \"7faa07ce-5317-4817-a37d-66ec8ab6c2cd\") " pod="metallb-system/controller-5d688f5ffc-mxhr5" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.898987 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44p4f\" (UniqueName: \"kubernetes.io/projected/f7685957-e95a-4b5e-af72-6cede8277b41-kube-api-access-44p4f\") pod \"frr-k8s-webhook-server-5478bdb765-zbpb2\" (UID: 
\"f7685957-e95a-4b5e-af72-6cede8277b41\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.901353 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28dkm\" (UniqueName: \"kubernetes.io/projected/f3657dc0-cffe-4309-ba77-ee5c025db0b5-kube-api-access-28dkm\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.902601 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwx54\" (UniqueName: \"kubernetes.io/projected/26904722-3358-41d0-9485-379c77a69694-kube-api-access-hwx54\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.907532 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8lxg\" (UniqueName: \"kubernetes.io/projected/7faa07ce-5317-4817-a37d-66ec8ab6c2cd-kube-api-access-j8lxg\") pod \"controller-5d688f5ffc-mxhr5\" (UID: \"7faa07ce-5317-4817-a37d-66ec8ab6c2cd\") " pod="metallb-system/controller-5d688f5ffc-mxhr5" Sep 30 13:49:30 crc kubenswrapper[4783]: I0930 13:49:30.958149 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2" Sep 30 13:49:31 crc kubenswrapper[4783]: I0930 13:49:31.379959 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7faa07ce-5317-4817-a37d-66ec8ab6c2cd-metrics-certs\") pod \"controller-5d688f5ffc-mxhr5\" (UID: \"7faa07ce-5317-4817-a37d-66ec8ab6c2cd\") " pod="metallb-system/controller-5d688f5ffc-mxhr5" Sep 30 13:49:31 crc kubenswrapper[4783]: I0930 13:49:31.380287 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-memberlist\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm" Sep 30 13:49:31 crc kubenswrapper[4783]: I0930 13:49:31.380313 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-metrics-certs\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm" Sep 30 13:49:31 crc kubenswrapper[4783]: I0930 13:49:31.380366 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/26904722-3358-41d0-9485-379c77a69694-metrics-certs\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:31 crc kubenswrapper[4783]: E0930 13:49:31.380491 4783 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 30 13:49:31 crc kubenswrapper[4783]: E0930 13:49:31.380598 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-memberlist podName:f3657dc0-cffe-4309-ba77-ee5c025db0b5 nodeName:}" failed. No retries permitted until 2025-09-30 13:49:32.380576589 +0000 UTC m=+872.312042896 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-memberlist") pod "speaker-sfsjm" (UID: "f3657dc0-cffe-4309-ba77-ee5c025db0b5") : secret "metallb-memberlist" not found Sep 30 13:49:31 crc kubenswrapper[4783]: I0930 13:49:31.385247 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-metrics-certs\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm" Sep 30 13:49:31 crc kubenswrapper[4783]: I0930 13:49:31.385647 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/26904722-3358-41d0-9485-379c77a69694-metrics-certs\") pod \"frr-k8s-z6csc\" (UID: \"26904722-3358-41d0-9485-379c77a69694\") " pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:31 crc kubenswrapper[4783]: I0930 13:49:31.385908 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7faa07ce-5317-4817-a37d-66ec8ab6c2cd-metrics-certs\") pod \"controller-5d688f5ffc-mxhr5\" (UID: \"7faa07ce-5317-4817-a37d-66ec8ab6c2cd\") " pod="metallb-system/controller-5d688f5ffc-mxhr5" Sep 30 13:49:31 crc kubenswrapper[4783]: I0930 13:49:31.393610 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2"] Sep 30 13:49:31 crc kubenswrapper[4783]: W0930 13:49:31.401048 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7685957_e95a_4b5e_af72_6cede8277b41.slice/crio-ee7e41d68f24210275d06ac0910762e7487042b52bf51a46718c6f06cb4abb19 WatchSource:0}: Error finding container ee7e41d68f24210275d06ac0910762e7487042b52bf51a46718c6f06cb4abb19: Status 404 returned error can't find the container with id ee7e41d68f24210275d06ac0910762e7487042b52bf51a46718c6f06cb4abb19 Sep 30 13:49:31 crc kubenswrapper[4783]: I0930 13:49:31.548383 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:31 crc kubenswrapper[4783]: I0930 13:49:31.652931 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-mxhr5" Sep 30 13:49:31 crc kubenswrapper[4783]: I0930 13:49:31.914709 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-mxhr5"] Sep 30 13:49:31 crc kubenswrapper[4783]: W0930 13:49:31.923972 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7faa07ce_5317_4817_a37d_66ec8ab6c2cd.slice/crio-d0c7a52754dabf72e1f82201994e2c4ac20d2df16a3ea9686a6787e73ede0ebb WatchSource:0}: Error finding container d0c7a52754dabf72e1f82201994e2c4ac20d2df16a3ea9686a6787e73ede0ebb: Status 404 returned error can't find the container with id d0c7a52754dabf72e1f82201994e2c4ac20d2df16a3ea9686a6787e73ede0ebb Sep 30 13:49:32 crc kubenswrapper[4783]: I0930 13:49:32.116584 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-z6csc" event={"ID":"26904722-3358-41d0-9485-379c77a69694","Type":"ContainerStarted","Data":"d9a807e1a7a21df411f44165a890cf3c403f55e9bc648713a8ace4f347f2cbfa"} Sep 30 13:49:32 crc kubenswrapper[4783]: I0930 13:49:32.118163 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-mxhr5" event={"ID":"7faa07ce-5317-4817-a37d-66ec8ab6c2cd","Type":"ContainerStarted","Data":"415b45a3e644bd1e36a516ced505c7ed94974aa0d9f8c8e4676ca9195842c7fa"} Sep 30 13:49:32 crc kubenswrapper[4783]: I0930 13:49:32.118212 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-mxhr5" event={"ID":"7faa07ce-5317-4817-a37d-66ec8ab6c2cd","Type":"ContainerStarted","Data":"d0c7a52754dabf72e1f82201994e2c4ac20d2df16a3ea9686a6787e73ede0ebb"} Sep 30 13:49:32 crc kubenswrapper[4783]: I0930 13:49:32.119496 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2" event={"ID":"f7685957-e95a-4b5e-af72-6cede8277b41","Type":"ContainerStarted","Data":"ee7e41d68f24210275d06ac0910762e7487042b52bf51a46718c6f06cb4abb19"} Sep 30 13:49:32 crc kubenswrapper[4783]: I0930 13:49:32.394468 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-memberlist\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm" Sep 30 13:49:32 crc kubenswrapper[4783]: I0930 13:49:32.401437 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f3657dc0-cffe-4309-ba77-ee5c025db0b5-memberlist\") pod \"speaker-sfsjm\" (UID: \"f3657dc0-cffe-4309-ba77-ee5c025db0b5\") " pod="metallb-system/speaker-sfsjm" Sep 30 13:49:32 crc kubenswrapper[4783]: I0930 13:49:32.528278 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-sfsjm" Sep 30 13:49:32 crc kubenswrapper[4783]: W0930 13:49:32.551773 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf3657dc0_cffe_4309_ba77_ee5c025db0b5.slice/crio-151801755b7eb74f256b0698d76b9249fd0f72bcab7c39e8d8ed8bf974501418 WatchSource:0}: Error finding container 151801755b7eb74f256b0698d76b9249fd0f72bcab7c39e8d8ed8bf974501418: Status 404 returned error can't find the container with id 151801755b7eb74f256b0698d76b9249fd0f72bcab7c39e8d8ed8bf974501418 Sep 30 13:49:33 crc kubenswrapper[4783]: I0930 13:49:33.137305 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-mxhr5" event={"ID":"7faa07ce-5317-4817-a37d-66ec8ab6c2cd","Type":"ContainerStarted","Data":"a8feba6039b0340d1aa57f50ffdd3a98d57cc0650ade06bbcbc8e34de96aec0f"} Sep 30 13:49:33 crc kubenswrapper[4783]: I0930 13:49:33.137679 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-mxhr5" Sep 30 13:49:33 crc kubenswrapper[4783]: I0930 13:49:33.139352 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-sfsjm" event={"ID":"f3657dc0-cffe-4309-ba77-ee5c025db0b5","Type":"ContainerStarted","Data":"2c204dd3cb597d5a8c0aacd4826c9cf07907da1785b0ab1d9c9b8cf059a6f7b4"} Sep 30 13:49:33 crc kubenswrapper[4783]: I0930 13:49:33.139487 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-sfsjm" event={"ID":"f3657dc0-cffe-4309-ba77-ee5c025db0b5","Type":"ContainerStarted","Data":"151801755b7eb74f256b0698d76b9249fd0f72bcab7c39e8d8ed8bf974501418"} Sep 30 13:49:33 crc kubenswrapper[4783]: I0930 13:49:33.153016 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-mxhr5" podStartSLOduration=3.153003083 podStartE2EDuration="3.153003083s" podCreationTimestamp="2025-09-30 13:49:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:49:33.151841246 +0000 UTC m=+873.083307553" watchObservedRunningTime="2025-09-30 13:49:33.153003083 +0000 UTC m=+873.084469390" Sep 30 13:49:34 crc kubenswrapper[4783]: I0930 13:49:34.149735 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-sfsjm" event={"ID":"f3657dc0-cffe-4309-ba77-ee5c025db0b5","Type":"ContainerStarted","Data":"6e7bd0e1aee0a7c56ca25c13014bf179395021177a6eb356b92da66ac0077693"} Sep 30 13:49:34 crc kubenswrapper[4783]: I0930 13:49:34.149830 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-sfsjm" Sep 30 13:49:39 crc kubenswrapper[4783]: I0930 13:49:39.200364 4783 generic.go:334] "Generic (PLEG): container finished" podID="26904722-3358-41d0-9485-379c77a69694" containerID="9b2bd93db06458d27d31e6d473a712bca9fc178b06782bfb781273b5b8524a51" exitCode=0 Sep 30 13:49:39 crc kubenswrapper[4783]: I0930 13:49:39.200449 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-z6csc" event={"ID":"26904722-3358-41d0-9485-379c77a69694","Type":"ContainerDied","Data":"9b2bd93db06458d27d31e6d473a712bca9fc178b06782bfb781273b5b8524a51"} Sep 30 13:49:39 crc kubenswrapper[4783]: I0930 13:49:39.204035 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2" 
event={"ID":"f7685957-e95a-4b5e-af72-6cede8277b41","Type":"ContainerStarted","Data":"09c01e18fce3b9b6ace7e023f3fa6b89f32294fed76e979e015df340dd468624"} Sep 30 13:49:39 crc kubenswrapper[4783]: I0930 13:49:39.204271 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2" Sep 30 13:49:39 crc kubenswrapper[4783]: I0930 13:49:39.239632 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-sfsjm" podStartSLOduration=9.23961229 podStartE2EDuration="9.23961229s" podCreationTimestamp="2025-09-30 13:49:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:49:34.172764782 +0000 UTC m=+874.104231089" watchObservedRunningTime="2025-09-30 13:49:39.23961229 +0000 UTC m=+879.171078607" Sep 30 13:49:39 crc kubenswrapper[4783]: I0930 13:49:39.256597 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2" podStartSLOduration=2.567002901 podStartE2EDuration="9.256578851s" podCreationTimestamp="2025-09-30 13:49:30 +0000 UTC" firstStartedPulling="2025-09-30 13:49:31.403536981 +0000 UTC m=+871.335003288" lastFinishedPulling="2025-09-30 13:49:38.093112921 +0000 UTC m=+878.024579238" observedRunningTime="2025-09-30 13:49:39.252823352 +0000 UTC m=+879.184289719" watchObservedRunningTime="2025-09-30 13:49:39.256578851 +0000 UTC m=+879.188045168" Sep 30 13:49:40 crc kubenswrapper[4783]: I0930 13:49:40.213484 4783 generic.go:334] "Generic (PLEG): container finished" podID="26904722-3358-41d0-9485-379c77a69694" containerID="6fb4bd374dc858611e8570d77d1638c04d12285f0d6fd0a846f1bd4db5caf0df" exitCode=0 Sep 30 13:49:40 crc kubenswrapper[4783]: I0930 13:49:40.213617 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-z6csc" event={"ID":"26904722-3358-41d0-9485-379c77a69694","Type":"ContainerDied","Data":"6fb4bd374dc858611e8570d77d1638c04d12285f0d6fd0a846f1bd4db5caf0df"} Sep 30 13:49:41 crc kubenswrapper[4783]: I0930 13:49:41.221901 4783 generic.go:334] "Generic (PLEG): container finished" podID="26904722-3358-41d0-9485-379c77a69694" containerID="63d6fa565273029634c3b08dfcbfd951b6588854f15b175b59dd9f0461b2d0c3" exitCode=0 Sep 30 13:49:41 crc kubenswrapper[4783]: I0930 13:49:41.221992 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-z6csc" event={"ID":"26904722-3358-41d0-9485-379c77a69694","Type":"ContainerDied","Data":"63d6fa565273029634c3b08dfcbfd951b6588854f15b175b59dd9f0461b2d0c3"} Sep 30 13:49:42 crc kubenswrapper[4783]: I0930 13:49:42.233512 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-z6csc" event={"ID":"26904722-3358-41d0-9485-379c77a69694","Type":"ContainerStarted","Data":"3401cebdc2711b715205303ba23991d8f557ba12f37851a9118d2ac4b5e9d21f"} Sep 30 13:49:42 crc kubenswrapper[4783]: I0930 13:49:42.233858 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-z6csc" event={"ID":"26904722-3358-41d0-9485-379c77a69694","Type":"ContainerStarted","Data":"f8609080b68089f5f60858264f374b2d19bc60038555c4ce32fae6899dc5d5be"} Sep 30 13:49:42 crc kubenswrapper[4783]: I0930 13:49:42.233882 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-z6csc" 
event={"ID":"26904722-3358-41d0-9485-379c77a69694","Type":"ContainerStarted","Data":"4275591f671fc557b360a2cd45d659a327186d9c4f703124064011e74dbcc68d"} Sep 30 13:49:42 crc kubenswrapper[4783]: I0930 13:49:42.233897 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-z6csc" event={"ID":"26904722-3358-41d0-9485-379c77a69694","Type":"ContainerStarted","Data":"b530278cc5d23a5a7c2b2847a2a9355de71c926ef18fdae978cd156a507ffcd7"} Sep 30 13:49:42 crc kubenswrapper[4783]: I0930 13:49:42.535167 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-sfsjm" Sep 30 13:49:43 crc kubenswrapper[4783]: I0930 13:49:43.242626 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-z6csc" event={"ID":"26904722-3358-41d0-9485-379c77a69694","Type":"ContainerStarted","Data":"6af20407d85a98980107bf690e405ff19a1a363c5f4a74aee6030afa7cc47514"} Sep 30 13:49:43 crc kubenswrapper[4783]: I0930 13:49:43.242674 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-z6csc" event={"ID":"26904722-3358-41d0-9485-379c77a69694","Type":"ContainerStarted","Data":"c5e64f7c9d290559441a37877a5a8d2e00f90fda219dbe14e41c475baec6de85"} Sep 30 13:49:43 crc kubenswrapper[4783]: I0930 13:49:43.242862 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:43 crc kubenswrapper[4783]: I0930 13:49:43.287396 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-z6csc" podStartSLOduration=6.937711279 podStartE2EDuration="13.287376573s" podCreationTimestamp="2025-09-30 13:49:30 +0000 UTC" firstStartedPulling="2025-09-30 13:49:31.762761103 +0000 UTC m=+871.694227440" lastFinishedPulling="2025-09-30 13:49:38.112426427 +0000 UTC m=+878.043892734" observedRunningTime="2025-09-30 13:49:43.284313065 +0000 UTC m=+883.215779372" watchObservedRunningTime="2025-09-30 13:49:43.287376573 +0000 UTC m=+883.218842880" Sep 30 13:49:44 crc kubenswrapper[4783]: I0930 13:49:44.345004 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59"] Sep 30 13:49:44 crc kubenswrapper[4783]: I0930 13:49:44.346734 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" Sep 30 13:49:44 crc kubenswrapper[4783]: I0930 13:49:44.348884 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 13:49:44 crc kubenswrapper[4783]: I0930 13:49:44.356926 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59"] Sep 30 13:49:44 crc kubenswrapper[4783]: I0930 13:49:44.361423 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tszfr\" (UniqueName: \"kubernetes.io/projected/4398f894-90ea-458d-8719-40757c59780c-kube-api-access-tszfr\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59\" (UID: \"4398f894-90ea-458d-8719-40757c59780c\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" Sep 30 13:49:44 crc kubenswrapper[4783]: I0930 13:49:44.361512 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4398f894-90ea-458d-8719-40757c59780c-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59\" (UID: \"4398f894-90ea-458d-8719-40757c59780c\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" Sep 30 13:49:44 crc kubenswrapper[4783]: I0930 13:49:44.361592 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4398f894-90ea-458d-8719-40757c59780c-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59\" (UID: \"4398f894-90ea-458d-8719-40757c59780c\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" Sep 30 13:49:44 crc kubenswrapper[4783]: I0930 13:49:44.463403 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tszfr\" (UniqueName: \"kubernetes.io/projected/4398f894-90ea-458d-8719-40757c59780c-kube-api-access-tszfr\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59\" (UID: \"4398f894-90ea-458d-8719-40757c59780c\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" Sep 30 13:49:44 crc kubenswrapper[4783]: I0930 13:49:44.463494 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4398f894-90ea-458d-8719-40757c59780c-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59\" (UID: \"4398f894-90ea-458d-8719-40757c59780c\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" Sep 30 13:49:44 crc kubenswrapper[4783]: I0930 13:49:44.463549 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4398f894-90ea-458d-8719-40757c59780c-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59\" (UID: \"4398f894-90ea-458d-8719-40757c59780c\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" Sep 30 13:49:44 crc kubenswrapper[4783]: I0930 13:49:44.464026 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/4398f894-90ea-458d-8719-40757c59780c-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59\" (UID: \"4398f894-90ea-458d-8719-40757c59780c\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" Sep 30 13:49:44 crc kubenswrapper[4783]: I0930 13:49:44.464149 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4398f894-90ea-458d-8719-40757c59780c-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59\" (UID: \"4398f894-90ea-458d-8719-40757c59780c\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" Sep 30 13:49:44 crc kubenswrapper[4783]: I0930 13:49:44.487364 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tszfr\" (UniqueName: \"kubernetes.io/projected/4398f894-90ea-458d-8719-40757c59780c-kube-api-access-tszfr\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59\" (UID: \"4398f894-90ea-458d-8719-40757c59780c\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" Sep 30 13:49:44 crc kubenswrapper[4783]: I0930 13:49:44.662688 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" Sep 30 13:49:45 crc kubenswrapper[4783]: I0930 13:49:45.100148 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59"] Sep 30 13:49:45 crc kubenswrapper[4783]: I0930 13:49:45.274380 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" event={"ID":"4398f894-90ea-458d-8719-40757c59780c","Type":"ContainerStarted","Data":"157d305fcb1f7789fe120b75b243b6a67e19cf3652f7bb70aea761466b922091"} Sep 30 13:49:45 crc kubenswrapper[4783]: I0930 13:49:45.274421 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" event={"ID":"4398f894-90ea-458d-8719-40757c59780c","Type":"ContainerStarted","Data":"0400c8b92c3bc3f8c30b6bfd43cefa63268c776c93e6111144917c97241c689f"} Sep 30 13:49:46 crc kubenswrapper[4783]: I0930 13:49:46.281927 4783 generic.go:334] "Generic (PLEG): container finished" podID="4398f894-90ea-458d-8719-40757c59780c" containerID="157d305fcb1f7789fe120b75b243b6a67e19cf3652f7bb70aea761466b922091" exitCode=0 Sep 30 13:49:46 crc kubenswrapper[4783]: I0930 13:49:46.281991 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" event={"ID":"4398f894-90ea-458d-8719-40757c59780c","Type":"ContainerDied","Data":"157d305fcb1f7789fe120b75b243b6a67e19cf3652f7bb70aea761466b922091"} Sep 30 13:49:46 crc kubenswrapper[4783]: I0930 13:49:46.550298 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:46 crc kubenswrapper[4783]: I0930 13:49:46.586841 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:50 crc kubenswrapper[4783]: I0930 13:49:50.313606 4783 generic.go:334] "Generic (PLEG): container finished" podID="4398f894-90ea-458d-8719-40757c59780c" 
containerID="5f6eb859ee4a1dba34bf3cf833b524370d3e6a75a2236085e527760ecff5b310" exitCode=0 Sep 30 13:49:50 crc kubenswrapper[4783]: I0930 13:49:50.313674 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" event={"ID":"4398f894-90ea-458d-8719-40757c59780c","Type":"ContainerDied","Data":"5f6eb859ee4a1dba34bf3cf833b524370d3e6a75a2236085e527760ecff5b310"} Sep 30 13:49:50 crc kubenswrapper[4783]: I0930 13:49:50.967507 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-zbpb2" Sep 30 13:49:51 crc kubenswrapper[4783]: I0930 13:49:51.323020 4783 generic.go:334] "Generic (PLEG): container finished" podID="4398f894-90ea-458d-8719-40757c59780c" containerID="78d764b893a3b9ceb454ab95e34b6133d629df2191b0e79f24da2e0075656979" exitCode=0 Sep 30 13:49:51 crc kubenswrapper[4783]: I0930 13:49:51.323078 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" event={"ID":"4398f894-90ea-458d-8719-40757c59780c","Type":"ContainerDied","Data":"78d764b893a3b9ceb454ab95e34b6133d629df2191b0e79f24da2e0075656979"} Sep 30 13:49:51 crc kubenswrapper[4783]: I0930 13:49:51.552073 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-z6csc" Sep 30 13:49:51 crc kubenswrapper[4783]: I0930 13:49:51.659425 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-mxhr5" Sep 30 13:49:52 crc kubenswrapper[4783]: I0930 13:49:52.590019 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" Sep 30 13:49:52 crc kubenswrapper[4783]: I0930 13:49:52.691632 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tszfr\" (UniqueName: \"kubernetes.io/projected/4398f894-90ea-458d-8719-40757c59780c-kube-api-access-tszfr\") pod \"4398f894-90ea-458d-8719-40757c59780c\" (UID: \"4398f894-90ea-458d-8719-40757c59780c\") " Sep 30 13:49:52 crc kubenswrapper[4783]: I0930 13:49:52.691713 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4398f894-90ea-458d-8719-40757c59780c-bundle\") pod \"4398f894-90ea-458d-8719-40757c59780c\" (UID: \"4398f894-90ea-458d-8719-40757c59780c\") " Sep 30 13:49:52 crc kubenswrapper[4783]: I0930 13:49:52.691848 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4398f894-90ea-458d-8719-40757c59780c-util\") pod \"4398f894-90ea-458d-8719-40757c59780c\" (UID: \"4398f894-90ea-458d-8719-40757c59780c\") " Sep 30 13:49:52 crc kubenswrapper[4783]: I0930 13:49:52.698595 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4398f894-90ea-458d-8719-40757c59780c-kube-api-access-tszfr" (OuterVolumeSpecName: "kube-api-access-tszfr") pod "4398f894-90ea-458d-8719-40757c59780c" (UID: "4398f894-90ea-458d-8719-40757c59780c"). InnerVolumeSpecName "kube-api-access-tszfr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:49:52 crc kubenswrapper[4783]: I0930 13:49:52.700520 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4398f894-90ea-458d-8719-40757c59780c-bundle" (OuterVolumeSpecName: "bundle") pod "4398f894-90ea-458d-8719-40757c59780c" (UID: "4398f894-90ea-458d-8719-40757c59780c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:49:52 crc kubenswrapper[4783]: I0930 13:49:52.705044 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4398f894-90ea-458d-8719-40757c59780c-util" (OuterVolumeSpecName: "util") pod "4398f894-90ea-458d-8719-40757c59780c" (UID: "4398f894-90ea-458d-8719-40757c59780c"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:49:52 crc kubenswrapper[4783]: I0930 13:49:52.793203 4783 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4398f894-90ea-458d-8719-40757c59780c-util\") on node \"crc\" DevicePath \"\"" Sep 30 13:49:52 crc kubenswrapper[4783]: I0930 13:49:52.793306 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tszfr\" (UniqueName: \"kubernetes.io/projected/4398f894-90ea-458d-8719-40757c59780c-kube-api-access-tszfr\") on node \"crc\" DevicePath \"\"" Sep 30 13:49:52 crc kubenswrapper[4783]: I0930 13:49:52.793340 4783 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4398f894-90ea-458d-8719-40757c59780c-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:49:53 crc kubenswrapper[4783]: I0930 13:49:53.339250 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" event={"ID":"4398f894-90ea-458d-8719-40757c59780c","Type":"ContainerDied","Data":"0400c8b92c3bc3f8c30b6bfd43cefa63268c776c93e6111144917c97241c689f"} Sep 30 13:49:53 crc kubenswrapper[4783]: I0930 13:49:53.339303 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0400c8b92c3bc3f8c30b6bfd43cefa63268c776c93e6111144917c97241c689f" Sep 30 13:49:53 crc kubenswrapper[4783]: I0930 13:49:53.339355 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59" Sep 30 13:49:56 crc kubenswrapper[4783]: I0930 13:49:56.494406 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-4k6pn"] Sep 30 13:49:56 crc kubenswrapper[4783]: E0930 13:49:56.494871 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4398f894-90ea-458d-8719-40757c59780c" containerName="util" Sep 30 13:49:56 crc kubenswrapper[4783]: I0930 13:49:56.494883 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="4398f894-90ea-458d-8719-40757c59780c" containerName="util" Sep 30 13:49:56 crc kubenswrapper[4783]: E0930 13:49:56.494895 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4398f894-90ea-458d-8719-40757c59780c" containerName="extract" Sep 30 13:49:56 crc kubenswrapper[4783]: I0930 13:49:56.494901 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="4398f894-90ea-458d-8719-40757c59780c" containerName="extract" Sep 30 13:49:56 crc kubenswrapper[4783]: E0930 13:49:56.494915 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4398f894-90ea-458d-8719-40757c59780c" containerName="pull" Sep 30 13:49:56 crc kubenswrapper[4783]: I0930 13:49:56.494922 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="4398f894-90ea-458d-8719-40757c59780c" containerName="pull" Sep 30 13:49:56 crc kubenswrapper[4783]: I0930 13:49:56.495034 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="4398f894-90ea-458d-8719-40757c59780c" containerName="extract" Sep 30 13:49:56 crc kubenswrapper[4783]: I0930 13:49:56.495487 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-4k6pn" Sep 30 13:49:56 crc kubenswrapper[4783]: I0930 13:49:56.497619 4783 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-gb86h" Sep 30 13:49:56 crc kubenswrapper[4783]: I0930 13:49:56.499715 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Sep 30 13:49:56 crc kubenswrapper[4783]: I0930 13:49:56.499875 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Sep 30 13:49:56 crc kubenswrapper[4783]: I0930 13:49:56.514892 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-4k6pn"] Sep 30 13:49:56 crc kubenswrapper[4783]: I0930 13:49:56.549162 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk2wd\" (UniqueName: \"kubernetes.io/projected/795e498e-3709-4a3e-af83-5e47f4665716-kube-api-access-mk2wd\") pod \"cert-manager-operator-controller-manager-57cd46d6d-4k6pn\" (UID: \"795e498e-3709-4a3e-af83-5e47f4665716\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-4k6pn" Sep 30 13:49:56 crc kubenswrapper[4783]: I0930 13:49:56.650190 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk2wd\" (UniqueName: \"kubernetes.io/projected/795e498e-3709-4a3e-af83-5e47f4665716-kube-api-access-mk2wd\") pod \"cert-manager-operator-controller-manager-57cd46d6d-4k6pn\" (UID: \"795e498e-3709-4a3e-af83-5e47f4665716\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-4k6pn" Sep 30 13:49:56 crc kubenswrapper[4783]: I0930 13:49:56.671521 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk2wd\" (UniqueName: \"kubernetes.io/projected/795e498e-3709-4a3e-af83-5e47f4665716-kube-api-access-mk2wd\") pod \"cert-manager-operator-controller-manager-57cd46d6d-4k6pn\" (UID: \"795e498e-3709-4a3e-af83-5e47f4665716\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-4k6pn" Sep 30 13:49:56 crc kubenswrapper[4783]: I0930 13:49:56.810367 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-4k6pn" Sep 30 13:49:57 crc kubenswrapper[4783]: I0930 13:49:57.291860 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-4k6pn"] Sep 30 13:49:57 crc kubenswrapper[4783]: I0930 13:49:57.365907 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-4k6pn" event={"ID":"795e498e-3709-4a3e-af83-5e47f4665716","Type":"ContainerStarted","Data":"6df75c40e799212137193271176b90ee4bb72e7f40b091cfc9e6337cab6ddf71"} Sep 30 13:50:05 crc kubenswrapper[4783]: I0930 13:50:05.413460 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-4k6pn" event={"ID":"795e498e-3709-4a3e-af83-5e47f4665716","Type":"ContainerStarted","Data":"e7459c4fed780d130669076b3a0232ffdfa956ffcc9b4258175f2a07c648757a"} Sep 30 13:50:05 crc kubenswrapper[4783]: I0930 13:50:05.434885 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-4k6pn" podStartSLOduration=2.540577787 podStartE2EDuration="9.434868384s" podCreationTimestamp="2025-09-30 13:49:56 +0000 UTC" firstStartedPulling="2025-09-30 13:49:57.303176269 +0000 UTC m=+897.234642576" lastFinishedPulling="2025-09-30 13:50:04.197466856 +0000 UTC m=+904.128933173" observedRunningTime="2025-09-30 13:50:05.432843149 +0000 UTC m=+905.364309456" watchObservedRunningTime="2025-09-30 13:50:05.434868384 +0000 UTC m=+905.366334701" Sep 30 13:50:08 crc kubenswrapper[4783]: I0930 13:50:08.574991 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-dtrgf"] Sep 30 13:50:08 crc kubenswrapper[4783]: I0930 13:50:08.576657 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-dtrgf" Sep 30 13:50:08 crc kubenswrapper[4783]: I0930 13:50:08.582484 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 30 13:50:08 crc kubenswrapper[4783]: I0930 13:50:08.582741 4783 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-cdn2s" Sep 30 13:50:08 crc kubenswrapper[4783]: I0930 13:50:08.582800 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 30 13:50:08 crc kubenswrapper[4783]: I0930 13:50:08.600813 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-dtrgf"] Sep 30 13:50:08 crc kubenswrapper[4783]: I0930 13:50:08.710805 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2zql\" (UniqueName: \"kubernetes.io/projected/1ed5cd4e-785b-4581-92a8-9fe49236902e-kube-api-access-m2zql\") pod \"cert-manager-webhook-d969966f-dtrgf\" (UID: \"1ed5cd4e-785b-4581-92a8-9fe49236902e\") " pod="cert-manager/cert-manager-webhook-d969966f-dtrgf" Sep 30 13:50:08 crc kubenswrapper[4783]: I0930 13:50:08.710991 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1ed5cd4e-785b-4581-92a8-9fe49236902e-bound-sa-token\") pod \"cert-manager-webhook-d969966f-dtrgf\" (UID: \"1ed5cd4e-785b-4581-92a8-9fe49236902e\") " pod="cert-manager/cert-manager-webhook-d969966f-dtrgf" Sep 30 13:50:08 crc kubenswrapper[4783]: I0930 13:50:08.812877 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2zql\" (UniqueName: \"kubernetes.io/projected/1ed5cd4e-785b-4581-92a8-9fe49236902e-kube-api-access-m2zql\") pod \"cert-manager-webhook-d969966f-dtrgf\" (UID: \"1ed5cd4e-785b-4581-92a8-9fe49236902e\") " pod="cert-manager/cert-manager-webhook-d969966f-dtrgf" Sep 30 13:50:08 crc kubenswrapper[4783]: I0930 13:50:08.812949 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1ed5cd4e-785b-4581-92a8-9fe49236902e-bound-sa-token\") pod \"cert-manager-webhook-d969966f-dtrgf\" (UID: \"1ed5cd4e-785b-4581-92a8-9fe49236902e\") " pod="cert-manager/cert-manager-webhook-d969966f-dtrgf" Sep 30 13:50:08 crc kubenswrapper[4783]: I0930 13:50:08.833797 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1ed5cd4e-785b-4581-92a8-9fe49236902e-bound-sa-token\") pod \"cert-manager-webhook-d969966f-dtrgf\" (UID: \"1ed5cd4e-785b-4581-92a8-9fe49236902e\") " pod="cert-manager/cert-manager-webhook-d969966f-dtrgf" Sep 30 13:50:08 crc kubenswrapper[4783]: I0930 13:50:08.834365 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2zql\" (UniqueName: \"kubernetes.io/projected/1ed5cd4e-785b-4581-92a8-9fe49236902e-kube-api-access-m2zql\") pod \"cert-manager-webhook-d969966f-dtrgf\" (UID: \"1ed5cd4e-785b-4581-92a8-9fe49236902e\") " pod="cert-manager/cert-manager-webhook-d969966f-dtrgf" Sep 30 13:50:08 crc kubenswrapper[4783]: I0930 13:50:08.913390 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-dtrgf" Sep 30 13:50:09 crc kubenswrapper[4783]: I0930 13:50:09.318635 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-dtrgf"] Sep 30 13:50:09 crc kubenswrapper[4783]: I0930 13:50:09.437453 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-dtrgf" event={"ID":"1ed5cd4e-785b-4581-92a8-9fe49236902e","Type":"ContainerStarted","Data":"ca388acb68f0217eed28457699a84291d26f22606e458d5fb16800d83d71614e"} Sep 30 13:50:12 crc kubenswrapper[4783]: I0930 13:50:12.059761 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-tx4c4"] Sep 30 13:50:12 crc kubenswrapper[4783]: I0930 13:50:12.061789 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-tx4c4" Sep 30 13:50:12 crc kubenswrapper[4783]: I0930 13:50:12.065958 4783 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-t4fjq" Sep 30 13:50:12 crc kubenswrapper[4783]: I0930 13:50:12.076814 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-tx4c4"] Sep 30 13:50:12 crc kubenswrapper[4783]: I0930 13:50:12.153065 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/72af3d05-5e53-4c83-a786-8574f0b34aa3-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-tx4c4\" (UID: \"72af3d05-5e53-4c83-a786-8574f0b34aa3\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-tx4c4" Sep 30 13:50:12 crc kubenswrapper[4783]: I0930 13:50:12.153122 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4xzg\" (UniqueName: \"kubernetes.io/projected/72af3d05-5e53-4c83-a786-8574f0b34aa3-kube-api-access-k4xzg\") pod \"cert-manager-cainjector-7d9f95dbf-tx4c4\" (UID: \"72af3d05-5e53-4c83-a786-8574f0b34aa3\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-tx4c4" Sep 30 13:50:12 crc kubenswrapper[4783]: I0930 13:50:12.254737 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/72af3d05-5e53-4c83-a786-8574f0b34aa3-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-tx4c4\" (UID: \"72af3d05-5e53-4c83-a786-8574f0b34aa3\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-tx4c4" Sep 30 13:50:12 crc kubenswrapper[4783]: I0930 13:50:12.254821 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4xzg\" (UniqueName: \"kubernetes.io/projected/72af3d05-5e53-4c83-a786-8574f0b34aa3-kube-api-access-k4xzg\") pod \"cert-manager-cainjector-7d9f95dbf-tx4c4\" (UID: \"72af3d05-5e53-4c83-a786-8574f0b34aa3\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-tx4c4" Sep 30 13:50:12 crc kubenswrapper[4783]: I0930 13:50:12.273775 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4xzg\" (UniqueName: \"kubernetes.io/projected/72af3d05-5e53-4c83-a786-8574f0b34aa3-kube-api-access-k4xzg\") pod \"cert-manager-cainjector-7d9f95dbf-tx4c4\" (UID: \"72af3d05-5e53-4c83-a786-8574f0b34aa3\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-tx4c4" Sep 30 13:50:12 crc kubenswrapper[4783]: I0930 13:50:12.274608 4783 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/72af3d05-5e53-4c83-a786-8574f0b34aa3-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-tx4c4\" (UID: \"72af3d05-5e53-4c83-a786-8574f0b34aa3\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-tx4c4" Sep 30 13:50:12 crc kubenswrapper[4783]: I0930 13:50:12.401441 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-tx4c4" Sep 30 13:50:12 crc kubenswrapper[4783]: I0930 13:50:12.818752 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-tx4c4"] Sep 30 13:50:13 crc kubenswrapper[4783]: I0930 13:50:13.463863 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-tx4c4" event={"ID":"72af3d05-5e53-4c83-a786-8574f0b34aa3","Type":"ContainerStarted","Data":"3252082d25937fd10405cc4e33b26b56f7da0b5b482ab5a90247543f39215810"} Sep 30 13:50:17 crc kubenswrapper[4783]: I0930 13:50:17.495298 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-tx4c4" event={"ID":"72af3d05-5e53-4c83-a786-8574f0b34aa3","Type":"ContainerStarted","Data":"906643f9f56fa852b634633d7d4b450b37336e21e587ddefaa6f4f707db47adc"} Sep 30 13:50:17 crc kubenswrapper[4783]: I0930 13:50:17.497503 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-dtrgf" event={"ID":"1ed5cd4e-785b-4581-92a8-9fe49236902e","Type":"ContainerStarted","Data":"b1f5462263bfc7bfda33753f4a381fa5c71018bdbf1fbbd8932b716066755113"} Sep 30 13:50:17 crc kubenswrapper[4783]: I0930 13:50:17.498463 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-d969966f-dtrgf" Sep 30 13:50:17 crc kubenswrapper[4783]: I0930 13:50:17.518392 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-tx4c4" podStartSLOduration=2.001759242 podStartE2EDuration="5.518369449s" podCreationTimestamp="2025-09-30 13:50:12 +0000 UTC" firstStartedPulling="2025-09-30 13:50:12.831777263 +0000 UTC m=+912.763243570" lastFinishedPulling="2025-09-30 13:50:16.34838747 +0000 UTC m=+916.279853777" observedRunningTime="2025-09-30 13:50:17.511295884 +0000 UTC m=+917.442762231" watchObservedRunningTime="2025-09-30 13:50:17.518369449 +0000 UTC m=+917.449835776" Sep 30 13:50:23 crc kubenswrapper[4783]: I0930 13:50:23.916683 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-d969966f-dtrgf" Sep 30 13:50:23 crc kubenswrapper[4783]: I0930 13:50:23.937120 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-d969966f-dtrgf" podStartSLOduration=8.919936652 podStartE2EDuration="15.937104165s" podCreationTimestamp="2025-09-30 13:50:08 +0000 UTC" firstStartedPulling="2025-09-30 13:50:09.328758409 +0000 UTC m=+909.260224716" lastFinishedPulling="2025-09-30 13:50:16.345925922 +0000 UTC m=+916.277392229" observedRunningTime="2025-09-30 13:50:17.52748565 +0000 UTC m=+917.458951977" watchObservedRunningTime="2025-09-30 13:50:23.937104165 +0000 UTC m=+923.868570472" Sep 30 13:50:27 crc kubenswrapper[4783]: I0930 13:50:27.630044 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-w6ft5"] Sep 30 13:50:27 crc kubenswrapper[4783]: I0930 13:50:27.632323 4783 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-w6ft5" Sep 30 13:50:27 crc kubenswrapper[4783]: I0930 13:50:27.636835 4783 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-47lc5" Sep 30 13:50:27 crc kubenswrapper[4783]: I0930 13:50:27.636833 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-w6ft5"] Sep 30 13:50:27 crc kubenswrapper[4783]: I0930 13:50:27.774583 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2g6p\" (UniqueName: \"kubernetes.io/projected/8d49829c-32eb-424e-9d8a-ca14c7f9d7fc-kube-api-access-z2g6p\") pod \"cert-manager-7d4cc89fcb-w6ft5\" (UID: \"8d49829c-32eb-424e-9d8a-ca14c7f9d7fc\") " pod="cert-manager/cert-manager-7d4cc89fcb-w6ft5" Sep 30 13:50:27 crc kubenswrapper[4783]: I0930 13:50:27.774739 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8d49829c-32eb-424e-9d8a-ca14c7f9d7fc-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-w6ft5\" (UID: \"8d49829c-32eb-424e-9d8a-ca14c7f9d7fc\") " pod="cert-manager/cert-manager-7d4cc89fcb-w6ft5" Sep 30 13:50:27 crc kubenswrapper[4783]: I0930 13:50:27.876460 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2g6p\" (UniqueName: \"kubernetes.io/projected/8d49829c-32eb-424e-9d8a-ca14c7f9d7fc-kube-api-access-z2g6p\") pod \"cert-manager-7d4cc89fcb-w6ft5\" (UID: \"8d49829c-32eb-424e-9d8a-ca14c7f9d7fc\") " pod="cert-manager/cert-manager-7d4cc89fcb-w6ft5" Sep 30 13:50:27 crc kubenswrapper[4783]: I0930 13:50:27.876596 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8d49829c-32eb-424e-9d8a-ca14c7f9d7fc-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-w6ft5\" (UID: \"8d49829c-32eb-424e-9d8a-ca14c7f9d7fc\") " pod="cert-manager/cert-manager-7d4cc89fcb-w6ft5" Sep 30 13:50:27 crc kubenswrapper[4783]: I0930 13:50:27.896439 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8d49829c-32eb-424e-9d8a-ca14c7f9d7fc-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-w6ft5\" (UID: \"8d49829c-32eb-424e-9d8a-ca14c7f9d7fc\") " pod="cert-manager/cert-manager-7d4cc89fcb-w6ft5" Sep 30 13:50:27 crc kubenswrapper[4783]: I0930 13:50:27.900523 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2g6p\" (UniqueName: \"kubernetes.io/projected/8d49829c-32eb-424e-9d8a-ca14c7f9d7fc-kube-api-access-z2g6p\") pod \"cert-manager-7d4cc89fcb-w6ft5\" (UID: \"8d49829c-32eb-424e-9d8a-ca14c7f9d7fc\") " pod="cert-manager/cert-manager-7d4cc89fcb-w6ft5" Sep 30 13:50:27 crc kubenswrapper[4783]: I0930 13:50:27.969503 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-w6ft5" Sep 30 13:50:28 crc kubenswrapper[4783]: I0930 13:50:28.368391 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-w6ft5"] Sep 30 13:50:28 crc kubenswrapper[4783]: I0930 13:50:28.567458 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-w6ft5" event={"ID":"8d49829c-32eb-424e-9d8a-ca14c7f9d7fc","Type":"ContainerStarted","Data":"8d3c7b24385ca15dd0f78f2055a2c4f82f9235fbd250744556b74a4d3d6f8822"} Sep 30 13:50:28 crc kubenswrapper[4783]: I0930 13:50:28.567507 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-w6ft5" event={"ID":"8d49829c-32eb-424e-9d8a-ca14c7f9d7fc","Type":"ContainerStarted","Data":"058db2f494aba0cea8e82b2d1defd30e64a29f2ef6995b18bfb40dde03f5c958"} Sep 30 13:50:28 crc kubenswrapper[4783]: I0930 13:50:28.586890 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-7d4cc89fcb-w6ft5" podStartSLOduration=1.586869236 podStartE2EDuration="1.586869236s" podCreationTimestamp="2025-09-30 13:50:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:50:28.585629886 +0000 UTC m=+928.517096203" watchObservedRunningTime="2025-09-30 13:50:28.586869236 +0000 UTC m=+928.518335553" Sep 30 13:50:37 crc kubenswrapper[4783]: I0930 13:50:37.815524 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-6hd4r"] Sep 30 13:50:37 crc kubenswrapper[4783]: I0930 13:50:37.817294 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-6hd4r" Sep 30 13:50:37 crc kubenswrapper[4783]: I0930 13:50:37.821496 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-29lj9" Sep 30 13:50:37 crc kubenswrapper[4783]: I0930 13:50:37.821794 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Sep 30 13:50:37 crc kubenswrapper[4783]: I0930 13:50:37.822107 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Sep 30 13:50:37 crc kubenswrapper[4783]: I0930 13:50:37.836525 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6hd4r"] Sep 30 13:50:37 crc kubenswrapper[4783]: I0930 13:50:37.923954 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p2hc\" (UniqueName: \"kubernetes.io/projected/efa23ec2-d8ea-4dc3-84d3-a11288626b51-kube-api-access-2p2hc\") pod \"openstack-operator-index-6hd4r\" (UID: \"efa23ec2-d8ea-4dc3-84d3-a11288626b51\") " pod="openstack-operators/openstack-operator-index-6hd4r" Sep 30 13:50:38 crc kubenswrapper[4783]: I0930 13:50:38.025075 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p2hc\" (UniqueName: \"kubernetes.io/projected/efa23ec2-d8ea-4dc3-84d3-a11288626b51-kube-api-access-2p2hc\") pod \"openstack-operator-index-6hd4r\" (UID: \"efa23ec2-d8ea-4dc3-84d3-a11288626b51\") " pod="openstack-operators/openstack-operator-index-6hd4r" Sep 30 13:50:38 crc kubenswrapper[4783]: I0930 13:50:38.046209 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-2p2hc\" (UniqueName: \"kubernetes.io/projected/efa23ec2-d8ea-4dc3-84d3-a11288626b51-kube-api-access-2p2hc\") pod \"openstack-operator-index-6hd4r\" (UID: \"efa23ec2-d8ea-4dc3-84d3-a11288626b51\") " pod="openstack-operators/openstack-operator-index-6hd4r" Sep 30 13:50:38 crc kubenswrapper[4783]: I0930 13:50:38.138471 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-6hd4r" Sep 30 13:50:38 crc kubenswrapper[4783]: I0930 13:50:38.417871 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6hd4r"] Sep 30 13:50:38 crc kubenswrapper[4783]: I0930 13:50:38.634516 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6hd4r" event={"ID":"efa23ec2-d8ea-4dc3-84d3-a11288626b51","Type":"ContainerStarted","Data":"f682e94ddcd04b622157944c1c80f6a82e897da0428831180a9d1e17468ffc2b"} Sep 30 13:50:39 crc kubenswrapper[4783]: I0930 13:50:39.645454 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6hd4r" event={"ID":"efa23ec2-d8ea-4dc3-84d3-a11288626b51","Type":"ContainerStarted","Data":"8d33838f1ea26b1ad47cf3ced0372de70f661231df07c648ff72ea04a23d2544"} Sep 30 13:50:41 crc kubenswrapper[4783]: I0930 13:50:41.174187 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-6hd4r" podStartSLOduration=3.324648293 podStartE2EDuration="4.174160554s" podCreationTimestamp="2025-09-30 13:50:37 +0000 UTC" firstStartedPulling="2025-09-30 13:50:38.438408309 +0000 UTC m=+938.369874636" lastFinishedPulling="2025-09-30 13:50:39.28792056 +0000 UTC m=+939.219386897" observedRunningTime="2025-09-30 13:50:39.659940281 +0000 UTC m=+939.591406618" watchObservedRunningTime="2025-09-30 13:50:41.174160554 +0000 UTC m=+941.105626891" Sep 30 13:50:41 crc kubenswrapper[4783]: I0930 13:50:41.181035 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-6hd4r"] Sep 30 13:50:41 crc kubenswrapper[4783]: I0930 13:50:41.663073 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-6hd4r" podUID="efa23ec2-d8ea-4dc3-84d3-a11288626b51" containerName="registry-server" containerID="cri-o://8d33838f1ea26b1ad47cf3ced0372de70f661231df07c648ff72ea04a23d2544" gracePeriod=2 Sep 30 13:50:41 crc kubenswrapper[4783]: I0930 13:50:41.799905 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-9c5wp"] Sep 30 13:50:41 crc kubenswrapper[4783]: I0930 13:50:41.801359 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-9c5wp" Sep 30 13:50:41 crc kubenswrapper[4783]: I0930 13:50:41.814017 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-9c5wp"] Sep 30 13:50:41 crc kubenswrapper[4783]: I0930 13:50:41.882029 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgl9d\" (UniqueName: \"kubernetes.io/projected/e71f47e2-3ec5-4c17-b78b-c2965f54fb22-kube-api-access-lgl9d\") pod \"openstack-operator-index-9c5wp\" (UID: \"e71f47e2-3ec5-4c17-b78b-c2965f54fb22\") " pod="openstack-operators/openstack-operator-index-9c5wp" Sep 30 13:50:41 crc kubenswrapper[4783]: I0930 13:50:41.983377 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgl9d\" (UniqueName: \"kubernetes.io/projected/e71f47e2-3ec5-4c17-b78b-c2965f54fb22-kube-api-access-lgl9d\") pod \"openstack-operator-index-9c5wp\" (UID: \"e71f47e2-3ec5-4c17-b78b-c2965f54fb22\") " pod="openstack-operators/openstack-operator-index-9c5wp" Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.005651 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgl9d\" (UniqueName: \"kubernetes.io/projected/e71f47e2-3ec5-4c17-b78b-c2965f54fb22-kube-api-access-lgl9d\") pod \"openstack-operator-index-9c5wp\" (UID: \"e71f47e2-3ec5-4c17-b78b-c2965f54fb22\") " pod="openstack-operators/openstack-operator-index-9c5wp" Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.063238 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-6hd4r" Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.157950 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-9c5wp" Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.186948 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2p2hc\" (UniqueName: \"kubernetes.io/projected/efa23ec2-d8ea-4dc3-84d3-a11288626b51-kube-api-access-2p2hc\") pod \"efa23ec2-d8ea-4dc3-84d3-a11288626b51\" (UID: \"efa23ec2-d8ea-4dc3-84d3-a11288626b51\") " Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.190823 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efa23ec2-d8ea-4dc3-84d3-a11288626b51-kube-api-access-2p2hc" (OuterVolumeSpecName: "kube-api-access-2p2hc") pod "efa23ec2-d8ea-4dc3-84d3-a11288626b51" (UID: "efa23ec2-d8ea-4dc3-84d3-a11288626b51"). InnerVolumeSpecName "kube-api-access-2p2hc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.288302 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2p2hc\" (UniqueName: \"kubernetes.io/projected/efa23ec2-d8ea-4dc3-84d3-a11288626b51-kube-api-access-2p2hc\") on node \"crc\" DevicePath \"\"" Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.597336 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-9c5wp"] Sep 30 13:50:42 crc kubenswrapper[4783]: W0930 13:50:42.603649 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode71f47e2_3ec5_4c17_b78b_c2965f54fb22.slice/crio-e886cb4bd2eabaf848fbf551350e7b57f6807f64bc669cf911f4e58deb4fa316 WatchSource:0}: Error finding container e886cb4bd2eabaf848fbf551350e7b57f6807f64bc669cf911f4e58deb4fa316: Status 404 returned error can't find the container with id e886cb4bd2eabaf848fbf551350e7b57f6807f64bc669cf911f4e58deb4fa316 Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.674585 4783 generic.go:334] "Generic (PLEG): container finished" podID="efa23ec2-d8ea-4dc3-84d3-a11288626b51" containerID="8d33838f1ea26b1ad47cf3ced0372de70f661231df07c648ff72ea04a23d2544" exitCode=0 Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.674675 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6hd4r" event={"ID":"efa23ec2-d8ea-4dc3-84d3-a11288626b51","Type":"ContainerDied","Data":"8d33838f1ea26b1ad47cf3ced0372de70f661231df07c648ff72ea04a23d2544"} Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.674713 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6hd4r" event={"ID":"efa23ec2-d8ea-4dc3-84d3-a11288626b51","Type":"ContainerDied","Data":"f682e94ddcd04b622157944c1c80f6a82e897da0428831180a9d1e17468ffc2b"} Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.674739 4783 scope.go:117] "RemoveContainer" containerID="8d33838f1ea26b1ad47cf3ced0372de70f661231df07c648ff72ea04a23d2544" Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.674891 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-6hd4r" Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.680434 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-9c5wp" event={"ID":"e71f47e2-3ec5-4c17-b78b-c2965f54fb22","Type":"ContainerStarted","Data":"e886cb4bd2eabaf848fbf551350e7b57f6807f64bc669cf911f4e58deb4fa316"} Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.709524 4783 scope.go:117] "RemoveContainer" containerID="8d33838f1ea26b1ad47cf3ced0372de70f661231df07c648ff72ea04a23d2544" Sep 30 13:50:42 crc kubenswrapper[4783]: E0930 13:50:42.709930 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d33838f1ea26b1ad47cf3ced0372de70f661231df07c648ff72ea04a23d2544\": container with ID starting with 8d33838f1ea26b1ad47cf3ced0372de70f661231df07c648ff72ea04a23d2544 not found: ID does not exist" containerID="8d33838f1ea26b1ad47cf3ced0372de70f661231df07c648ff72ea04a23d2544" Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.709958 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d33838f1ea26b1ad47cf3ced0372de70f661231df07c648ff72ea04a23d2544"} err="failed to get container status \"8d33838f1ea26b1ad47cf3ced0372de70f661231df07c648ff72ea04a23d2544\": rpc error: code = NotFound desc = could not find container \"8d33838f1ea26b1ad47cf3ced0372de70f661231df07c648ff72ea04a23d2544\": container with ID starting with 8d33838f1ea26b1ad47cf3ced0372de70f661231df07c648ff72ea04a23d2544 not found: ID does not exist" Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.715387 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-6hd4r"] Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.719413 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-6hd4r"] Sep 30 13:50:42 crc kubenswrapper[4783]: I0930 13:50:42.858081 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efa23ec2-d8ea-4dc3-84d3-a11288626b51" path="/var/lib/kubelet/pods/efa23ec2-d8ea-4dc3-84d3-a11288626b51/volumes" Sep 30 13:50:43 crc kubenswrapper[4783]: I0930 13:50:43.700344 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-9c5wp" event={"ID":"e71f47e2-3ec5-4c17-b78b-c2965f54fb22","Type":"ContainerStarted","Data":"e8c7a1c097689392031af56b27c9d21e1e27d8f7358e0d2e9e19499370564942"} Sep 30 13:50:43 crc kubenswrapper[4783]: I0930 13:50:43.730542 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-9c5wp" podStartSLOduration=2.230024634 podStartE2EDuration="2.730521739s" podCreationTimestamp="2025-09-30 13:50:41 +0000 UTC" firstStartedPulling="2025-09-30 13:50:42.608789399 +0000 UTC m=+942.540255706" lastFinishedPulling="2025-09-30 13:50:43.109286484 +0000 UTC m=+943.040752811" observedRunningTime="2025-09-30 13:50:43.72113997 +0000 UTC m=+943.652606297" watchObservedRunningTime="2025-09-30 13:50:43.730521739 +0000 UTC m=+943.661988056" Sep 30 13:50:52 crc kubenswrapper[4783]: I0930 13:50:52.159149 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-9c5wp" Sep 30 13:50:52 crc kubenswrapper[4783]: I0930 13:50:52.160426 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/openstack-operator-index-9c5wp" Sep 30 13:50:52 crc kubenswrapper[4783]: I0930 13:50:52.206741 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-9c5wp" Sep 30 13:50:52 crc kubenswrapper[4783]: I0930 13:50:52.809429 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-9c5wp" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.434585 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg"] Sep 30 13:50:54 crc kubenswrapper[4783]: E0930 13:50:54.434864 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efa23ec2-d8ea-4dc3-84d3-a11288626b51" containerName="registry-server" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.434885 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="efa23ec2-d8ea-4dc3-84d3-a11288626b51" containerName="registry-server" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.435055 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="efa23ec2-d8ea-4dc3-84d3-a11288626b51" containerName="registry-server" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.436080 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.441765 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-bqfff" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.461415 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg"] Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.563719 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-bundle\") pod \"5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg\" (UID: \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\") " pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.563800 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjsdk\" (UniqueName: \"kubernetes.io/projected/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-kube-api-access-sjsdk\") pod \"5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg\" (UID: \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\") " pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.563854 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-util\") pod \"5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg\" (UID: \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\") " pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.665425 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjsdk\" (UniqueName: 
\"kubernetes.io/projected/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-kube-api-access-sjsdk\") pod \"5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg\" (UID: \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\") " pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.665548 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-util\") pod \"5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg\" (UID: \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\") " pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.665657 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-bundle\") pod \"5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg\" (UID: \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\") " pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.666155 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-util\") pod \"5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg\" (UID: \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\") " pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.666397 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-bundle\") pod \"5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg\" (UID: \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\") " pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.692193 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjsdk\" (UniqueName: \"kubernetes.io/projected/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-kube-api-access-sjsdk\") pod \"5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg\" (UID: \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\") " pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" Sep 30 13:50:54 crc kubenswrapper[4783]: I0930 13:50:54.773816 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" Sep 30 13:50:55 crc kubenswrapper[4783]: I0930 13:50:55.202537 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg"] Sep 30 13:50:55 crc kubenswrapper[4783]: I0930 13:50:55.791030 4783 generic.go:334] "Generic (PLEG): container finished" podID="bb7e3b6e-d9cb-4a1b-a5be-44612248b60a" containerID="37bcf92cee78cba6c37d6faf1c10c233517df739c025dacfcc7e63e506b43ac8" exitCode=0 Sep 30 13:50:55 crc kubenswrapper[4783]: I0930 13:50:55.791080 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" event={"ID":"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a","Type":"ContainerDied","Data":"37bcf92cee78cba6c37d6faf1c10c233517df739c025dacfcc7e63e506b43ac8"} Sep 30 13:50:55 crc kubenswrapper[4783]: I0930 13:50:55.791107 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" event={"ID":"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a","Type":"ContainerStarted","Data":"8f9298457859353eeabc5ba7120d10fdebdbcbd75e98b869441550f730aabb79"} Sep 30 13:50:57 crc kubenswrapper[4783]: I0930 13:50:57.807688 4783 generic.go:334] "Generic (PLEG): container finished" podID="bb7e3b6e-d9cb-4a1b-a5be-44612248b60a" containerID="68b5c7a811a858aea327c61f0c5965618ff8008f36378dc23907d4feb2bc6f10" exitCode=0 Sep 30 13:50:57 crc kubenswrapper[4783]: I0930 13:50:57.807810 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" event={"ID":"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a","Type":"ContainerDied","Data":"68b5c7a811a858aea327c61f0c5965618ff8008f36378dc23907d4feb2bc6f10"} Sep 30 13:50:58 crc kubenswrapper[4783]: I0930 13:50:58.817872 4783 generic.go:334] "Generic (PLEG): container finished" podID="bb7e3b6e-d9cb-4a1b-a5be-44612248b60a" containerID="f6971b42bf85a7b32de0eff9ad48f09f277d8c213df86f53584154a8c2bcf7b6" exitCode=0 Sep 30 13:50:58 crc kubenswrapper[4783]: I0930 13:50:58.817957 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" event={"ID":"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a","Type":"ContainerDied","Data":"f6971b42bf85a7b32de0eff9ad48f09f277d8c213df86f53584154a8c2bcf7b6"} Sep 30 13:51:00 crc kubenswrapper[4783]: I0930 13:51:00.171790 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" Sep 30 13:51:00 crc kubenswrapper[4783]: I0930 13:51:00.245614 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-bundle\") pod \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\" (UID: \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\") " Sep 30 13:51:00 crc kubenswrapper[4783]: I0930 13:51:00.245682 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-util\") pod \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\" (UID: \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\") " Sep 30 13:51:00 crc kubenswrapper[4783]: I0930 13:51:00.245704 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjsdk\" (UniqueName: \"kubernetes.io/projected/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-kube-api-access-sjsdk\") pod \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\" (UID: \"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a\") " Sep 30 13:51:00 crc kubenswrapper[4783]: I0930 13:51:00.246939 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-bundle" (OuterVolumeSpecName: "bundle") pod "bb7e3b6e-d9cb-4a1b-a5be-44612248b60a" (UID: "bb7e3b6e-d9cb-4a1b-a5be-44612248b60a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:51:00 crc kubenswrapper[4783]: I0930 13:51:00.252321 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-kube-api-access-sjsdk" (OuterVolumeSpecName: "kube-api-access-sjsdk") pod "bb7e3b6e-d9cb-4a1b-a5be-44612248b60a" (UID: "bb7e3b6e-d9cb-4a1b-a5be-44612248b60a"). InnerVolumeSpecName "kube-api-access-sjsdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:51:00 crc kubenswrapper[4783]: I0930 13:51:00.260694 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-util" (OuterVolumeSpecName: "util") pod "bb7e3b6e-d9cb-4a1b-a5be-44612248b60a" (UID: "bb7e3b6e-d9cb-4a1b-a5be-44612248b60a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:51:00 crc kubenswrapper[4783]: I0930 13:51:00.348725 4783 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:51:00 crc kubenswrapper[4783]: I0930 13:51:00.348795 4783 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-util\") on node \"crc\" DevicePath \"\"" Sep 30 13:51:00 crc kubenswrapper[4783]: I0930 13:51:00.348822 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjsdk\" (UniqueName: \"kubernetes.io/projected/bb7e3b6e-d9cb-4a1b-a5be-44612248b60a-kube-api-access-sjsdk\") on node \"crc\" DevicePath \"\"" Sep 30 13:51:00 crc kubenswrapper[4783]: I0930 13:51:00.839701 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" event={"ID":"bb7e3b6e-d9cb-4a1b-a5be-44612248b60a","Type":"ContainerDied","Data":"8f9298457859353eeabc5ba7120d10fdebdbcbd75e98b869441550f730aabb79"} Sep 30 13:51:00 crc kubenswrapper[4783]: I0930 13:51:00.839744 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f9298457859353eeabc5ba7120d10fdebdbcbd75e98b869441550f730aabb79" Sep 30 13:51:00 crc kubenswrapper[4783]: I0930 13:51:00.839802 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg" Sep 30 13:51:06 crc kubenswrapper[4783]: I0930 13:51:06.933628 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-56dc567787-l5qvt"] Sep 30 13:51:06 crc kubenswrapper[4783]: E0930 13:51:06.934242 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb7e3b6e-d9cb-4a1b-a5be-44612248b60a" containerName="pull" Sep 30 13:51:06 crc kubenswrapper[4783]: I0930 13:51:06.934253 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb7e3b6e-d9cb-4a1b-a5be-44612248b60a" containerName="pull" Sep 30 13:51:06 crc kubenswrapper[4783]: E0930 13:51:06.934268 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb7e3b6e-d9cb-4a1b-a5be-44612248b60a" containerName="util" Sep 30 13:51:06 crc kubenswrapper[4783]: I0930 13:51:06.934275 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb7e3b6e-d9cb-4a1b-a5be-44612248b60a" containerName="util" Sep 30 13:51:06 crc kubenswrapper[4783]: E0930 13:51:06.934286 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb7e3b6e-d9cb-4a1b-a5be-44612248b60a" containerName="extract" Sep 30 13:51:06 crc kubenswrapper[4783]: I0930 13:51:06.934291 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb7e3b6e-d9cb-4a1b-a5be-44612248b60a" containerName="extract" Sep 30 13:51:06 crc kubenswrapper[4783]: I0930 13:51:06.934388 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb7e3b6e-d9cb-4a1b-a5be-44612248b60a" containerName="extract" Sep 30 13:51:06 crc kubenswrapper[4783]: I0930 13:51:06.935007 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-56dc567787-l5qvt" Sep 30 13:51:06 crc kubenswrapper[4783]: I0930 13:51:06.937147 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-xvvqv" Sep 30 13:51:06 crc kubenswrapper[4783]: I0930 13:51:06.972454 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-56dc567787-l5qvt"] Sep 30 13:51:07 crc kubenswrapper[4783]: I0930 13:51:07.042792 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2vp7\" (UniqueName: \"kubernetes.io/projected/2939c11a-b620-4766-8e26-97ef66a8f4ad-kube-api-access-h2vp7\") pod \"openstack-operator-controller-operator-56dc567787-l5qvt\" (UID: \"2939c11a-b620-4766-8e26-97ef66a8f4ad\") " pod="openstack-operators/openstack-operator-controller-operator-56dc567787-l5qvt" Sep 30 13:51:07 crc kubenswrapper[4783]: I0930 13:51:07.143762 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2vp7\" (UniqueName: \"kubernetes.io/projected/2939c11a-b620-4766-8e26-97ef66a8f4ad-kube-api-access-h2vp7\") pod \"openstack-operator-controller-operator-56dc567787-l5qvt\" (UID: \"2939c11a-b620-4766-8e26-97ef66a8f4ad\") " pod="openstack-operators/openstack-operator-controller-operator-56dc567787-l5qvt" Sep 30 13:51:07 crc kubenswrapper[4783]: I0930 13:51:07.165609 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2vp7\" (UniqueName: \"kubernetes.io/projected/2939c11a-b620-4766-8e26-97ef66a8f4ad-kube-api-access-h2vp7\") pod \"openstack-operator-controller-operator-56dc567787-l5qvt\" (UID: \"2939c11a-b620-4766-8e26-97ef66a8f4ad\") " pod="openstack-operators/openstack-operator-controller-operator-56dc567787-l5qvt" Sep 30 13:51:07 crc kubenswrapper[4783]: I0930 13:51:07.254293 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-56dc567787-l5qvt" Sep 30 13:51:07 crc kubenswrapper[4783]: I0930 13:51:07.673499 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:51:07 crc kubenswrapper[4783]: I0930 13:51:07.673853 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:51:07 crc kubenswrapper[4783]: I0930 13:51:07.721233 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-56dc567787-l5qvt"] Sep 30 13:51:07 crc kubenswrapper[4783]: I0930 13:51:07.892093 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-56dc567787-l5qvt" event={"ID":"2939c11a-b620-4766-8e26-97ef66a8f4ad","Type":"ContainerStarted","Data":"ebf924e9d491395d6298542207d259d903fd8b4a6aac0c3e22020b1b9a4ca050"} Sep 30 13:51:11 crc kubenswrapper[4783]: I0930 13:51:11.925584 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-56dc567787-l5qvt" event={"ID":"2939c11a-b620-4766-8e26-97ef66a8f4ad","Type":"ContainerStarted","Data":"e8de4dc21ce77225404796908ab653ee8100542e1e4053a2cf084d4519b42e80"} Sep 30 13:51:14 crc kubenswrapper[4783]: I0930 13:51:14.950411 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-56dc567787-l5qvt" event={"ID":"2939c11a-b620-4766-8e26-97ef66a8f4ad","Type":"ContainerStarted","Data":"526b4e00805d6f191292efd4506ef6094ced414163927fff98f601baa486c398"} Sep 30 13:51:14 crc kubenswrapper[4783]: I0930 13:51:14.950993 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-56dc567787-l5qvt" Sep 30 13:51:14 crc kubenswrapper[4783]: I0930 13:51:14.993623 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-56dc567787-l5qvt" podStartSLOduration=2.31352823 podStartE2EDuration="8.993600858s" podCreationTimestamp="2025-09-30 13:51:06 +0000 UTC" firstStartedPulling="2025-09-30 13:51:07.760784903 +0000 UTC m=+967.692251200" lastFinishedPulling="2025-09-30 13:51:14.440857521 +0000 UTC m=+974.372323828" observedRunningTime="2025-09-30 13:51:14.989556579 +0000 UTC m=+974.921022906" watchObservedRunningTime="2025-09-30 13:51:14.993600858 +0000 UTC m=+974.925067165" Sep 30 13:51:17 crc kubenswrapper[4783]: I0930 13:51:17.258719 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-56dc567787-l5qvt" Sep 30 13:51:33 crc kubenswrapper[4783]: I0930 13:51:33.953138 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f7f98cb69-bd2g9"] Sep 30 13:51:33 crc kubenswrapper[4783]: I0930 13:51:33.954613 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-bd2g9" Sep 30 13:51:33 crc kubenswrapper[4783]: I0930 13:51:33.956279 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-q48pd" Sep 30 13:51:33 crc kubenswrapper[4783]: I0930 13:51:33.967107 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f7f98cb69-bd2g9"] Sep 30 13:51:33 crc kubenswrapper[4783]: I0930 13:51:33.971680 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859cd486d-npt95"] Sep 30 13:51:33 crc kubenswrapper[4783]: I0930 13:51:33.973045 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-npt95" Sep 30 13:51:33 crc kubenswrapper[4783]: I0930 13:51:33.975392 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-l7ms8" Sep 30 13:51:33 crc kubenswrapper[4783]: I0930 13:51:33.992511 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-77fb7bcf5b-5x5rg"] Sep 30 13:51:33 crc kubenswrapper[4783]: I0930 13:51:33.994034 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-5x5rg" Sep 30 13:51:33 crc kubenswrapper[4783]: I0930 13:51:33.997911 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-48rrg" Sep 30 13:51:33 crc kubenswrapper[4783]: I0930 13:51:33.997931 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-8bc4775b5-d7vtg"] Sep 30 13:51:33 crc kubenswrapper[4783]: I0930 13:51:33.999245 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-d7vtg" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.003432 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859cd486d-npt95"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.006126 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-g9zlz" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.012067 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b4fc86755-8qnst"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.013210 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8qnst" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.017363 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-nvcks" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.019096 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnsxz\" (UniqueName: \"kubernetes.io/projected/30aabf9c-e0d4-44ac-ae33-b5f7784941ce-kube-api-access-bnsxz\") pod \"glance-operator-controller-manager-8bc4775b5-d7vtg\" (UID: \"30aabf9c-e0d4-44ac-ae33-b5f7784941ce\") " pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-d7vtg" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.028276 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8bc4775b5-d7vtg"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.050562 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b4fc86755-8qnst"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.070379 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-77fb7bcf5b-5x5rg"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.077981 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-679b4759bb-l2n7h"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.079181 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-l2n7h" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.081003 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-628kb" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.104417 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.105298 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.107966 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-679b4759bb-l2n7h"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.111616 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.111618 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-wswrv" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.120189 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcw5f\" (UniqueName: \"kubernetes.io/projected/58b3d85e-a497-4e26-98ab-89101226c62a-kube-api-access-bcw5f\") pod \"horizon-operator-controller-manager-679b4759bb-l2n7h\" (UID: \"58b3d85e-a497-4e26-98ab-89101226c62a\") " pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-l2n7h" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.120246 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnsxz\" (UniqueName: \"kubernetes.io/projected/30aabf9c-e0d4-44ac-ae33-b5f7784941ce-kube-api-access-bnsxz\") pod \"glance-operator-controller-manager-8bc4775b5-d7vtg\" (UID: \"30aabf9c-e0d4-44ac-ae33-b5f7784941ce\") " pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-d7vtg" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.120271 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45645\" (UniqueName: \"kubernetes.io/projected/a3064510-3c7e-4094-a3b6-ae572fba1a95-kube-api-access-45645\") pod \"cinder-operator-controller-manager-859cd486d-npt95\" (UID: \"a3064510-3c7e-4094-a3b6-ae572fba1a95\") " pod="openstack-operators/cinder-operator-controller-manager-859cd486d-npt95" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.120304 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbq69\" (UniqueName: \"kubernetes.io/projected/d6159fd2-85d7-4d0b-8c24-042468d2cff3-kube-api-access-sbq69\") pod \"infra-operator-controller-manager-7d9c7d9477-z27gn\" (UID: \"d6159fd2-85d7-4d0b-8c24-042468d2cff3\") " pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.120323 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-598b6\" (UniqueName: \"kubernetes.io/projected/3b26411c-5b67-4660-9994-0500516afb9e-kube-api-access-598b6\") pod \"barbican-operator-controller-manager-f7f98cb69-bd2g9\" (UID: \"3b26411c-5b67-4660-9994-0500516afb9e\") " pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-bd2g9" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.120339 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4l2tr\" (UniqueName: \"kubernetes.io/projected/60247f30-05f5-49e6-81f8-7a91203afa8e-kube-api-access-4l2tr\") pod \"heat-operator-controller-manager-5b4fc86755-8qnst\" (UID: \"60247f30-05f5-49e6-81f8-7a91203afa8e\") " pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8qnst" Sep 30 13:51:34 
crc kubenswrapper[4783]: I0930 13:51:34.120358 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2m7f\" (UniqueName: \"kubernetes.io/projected/0c47e365-48fb-43e8-9932-04850ec2344d-kube-api-access-s2m7f\") pod \"designate-operator-controller-manager-77fb7bcf5b-5x5rg\" (UID: \"0c47e365-48fb-43e8-9932-04850ec2344d\") " pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-5x5rg" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.120395 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d6159fd2-85d7-4d0b-8c24-042468d2cff3-cert\") pod \"infra-operator-controller-manager-7d9c7d9477-z27gn\" (UID: \"d6159fd2-85d7-4d0b-8c24-042468d2cff3\") " pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.126775 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6f589bc7f7-t2dkk"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.127738 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-t2dkk" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.130601 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.131681 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-prn4v" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.150197 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-59d7dc95cf-prn5v"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.156771 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-prn5v" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.175738 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-6qpvf" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.181955 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnsxz\" (UniqueName: \"kubernetes.io/projected/30aabf9c-e0d4-44ac-ae33-b5f7784941ce-kube-api-access-bnsxz\") pod \"glance-operator-controller-manager-8bc4775b5-d7vtg\" (UID: \"30aabf9c-e0d4-44ac-ae33-b5f7784941ce\") " pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-d7vtg" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.230065 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d6159fd2-85d7-4d0b-8c24-042468d2cff3-cert\") pod \"infra-operator-controller-manager-7d9c7d9477-z27gn\" (UID: \"d6159fd2-85d7-4d0b-8c24-042468d2cff3\") " pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.230122 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcw5f\" (UniqueName: \"kubernetes.io/projected/58b3d85e-a497-4e26-98ab-89101226c62a-kube-api-access-bcw5f\") pod \"horizon-operator-controller-manager-679b4759bb-l2n7h\" (UID: \"58b3d85e-a497-4e26-98ab-89101226c62a\") " pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-l2n7h" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.230157 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45645\" (UniqueName: \"kubernetes.io/projected/a3064510-3c7e-4094-a3b6-ae572fba1a95-kube-api-access-45645\") pod \"cinder-operator-controller-manager-859cd486d-npt95\" (UID: \"a3064510-3c7e-4094-a3b6-ae572fba1a95\") " pod="openstack-operators/cinder-operator-controller-manager-859cd486d-npt95" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.230187 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbq69\" (UniqueName: \"kubernetes.io/projected/d6159fd2-85d7-4d0b-8c24-042468d2cff3-kube-api-access-sbq69\") pod \"infra-operator-controller-manager-7d9c7d9477-z27gn\" (UID: \"d6159fd2-85d7-4d0b-8c24-042468d2cff3\") " pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.230214 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-598b6\" (UniqueName: \"kubernetes.io/projected/3b26411c-5b67-4660-9994-0500516afb9e-kube-api-access-598b6\") pod \"barbican-operator-controller-manager-f7f98cb69-bd2g9\" (UID: \"3b26411c-5b67-4660-9994-0500516afb9e\") " pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-bd2g9" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.230253 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4l2tr\" (UniqueName: \"kubernetes.io/projected/60247f30-05f5-49e6-81f8-7a91203afa8e-kube-api-access-4l2tr\") pod \"heat-operator-controller-manager-5b4fc86755-8qnst\" (UID: \"60247f30-05f5-49e6-81f8-7a91203afa8e\") " pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8qnst" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.230275 4783 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2m7f\" (UniqueName: \"kubernetes.io/projected/0c47e365-48fb-43e8-9932-04850ec2344d-kube-api-access-s2m7f\") pod \"designate-operator-controller-manager-77fb7bcf5b-5x5rg\" (UID: \"0c47e365-48fb-43e8-9932-04850ec2344d\") " pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-5x5rg" Sep 30 13:51:34 crc kubenswrapper[4783]: E0930 13:51:34.230338 4783 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 30 13:51:34 crc kubenswrapper[4783]: E0930 13:51:34.230398 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6159fd2-85d7-4d0b-8c24-042468d2cff3-cert podName:d6159fd2-85d7-4d0b-8c24-042468d2cff3 nodeName:}" failed. No retries permitted until 2025-09-30 13:51:34.730376782 +0000 UTC m=+994.661843089 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d6159fd2-85d7-4d0b-8c24-042468d2cff3-cert") pod "infra-operator-controller-manager-7d9c7d9477-z27gn" (UID: "d6159fd2-85d7-4d0b-8c24-042468d2cff3") : secret "infra-operator-webhook-server-cert" not found Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.232038 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6f589bc7f7-t2dkk"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.253471 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-59d7dc95cf-prn5v"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.258455 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcw5f\" (UniqueName: \"kubernetes.io/projected/58b3d85e-a497-4e26-98ab-89101226c62a-kube-api-access-bcw5f\") pod \"horizon-operator-controller-manager-679b4759bb-l2n7h\" (UID: \"58b3d85e-a497-4e26-98ab-89101226c62a\") " pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-l2n7h" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.270080 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-598b6\" (UniqueName: \"kubernetes.io/projected/3b26411c-5b67-4660-9994-0500516afb9e-kube-api-access-598b6\") pod \"barbican-operator-controller-manager-f7f98cb69-bd2g9\" (UID: \"3b26411c-5b67-4660-9994-0500516afb9e\") " pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-bd2g9" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.270633 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45645\" (UniqueName: \"kubernetes.io/projected/a3064510-3c7e-4094-a3b6-ae572fba1a95-kube-api-access-45645\") pod \"cinder-operator-controller-manager-859cd486d-npt95\" (UID: \"a3064510-3c7e-4094-a3b6-ae572fba1a95\") " pod="openstack-operators/cinder-operator-controller-manager-859cd486d-npt95" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.271859 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-b7cf8cb5f-mvkjc"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.272445 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbq69\" (UniqueName: \"kubernetes.io/projected/d6159fd2-85d7-4d0b-8c24-042468d2cff3-kube-api-access-sbq69\") pod \"infra-operator-controller-manager-7d9c7d9477-z27gn\" (UID: 
\"d6159fd2-85d7-4d0b-8c24-042468d2cff3\") " pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.272862 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-mvkjc" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.273638 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2m7f\" (UniqueName: \"kubernetes.io/projected/0c47e365-48fb-43e8-9932-04850ec2344d-kube-api-access-s2m7f\") pod \"designate-operator-controller-manager-77fb7bcf5b-5x5rg\" (UID: \"0c47e365-48fb-43e8-9932-04850ec2344d\") " pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-5x5rg" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.274672 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-7zdnh" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.277176 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-bd2g9" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.278005 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4l2tr\" (UniqueName: \"kubernetes.io/projected/60247f30-05f5-49e6-81f8-7a91203afa8e-kube-api-access-4l2tr\") pod \"heat-operator-controller-manager-5b4fc86755-8qnst\" (UID: \"60247f30-05f5-49e6-81f8-7a91203afa8e\") " pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8qnst" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.287276 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-npt95" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.297102 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf5bb885-vd9cw"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.298269 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-vd9cw" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.301006 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-vgnbk" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.301458 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-b7cf8cb5f-mvkjc"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.311093 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6b96467f46-8l52m"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.312255 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8l52m" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.314955 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-5x5rg" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.316512 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-vcnqw" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.321688 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf5bb885-vd9cw"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.323644 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6b96467f46-8l52m"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.330730 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-79f9fc9fd8-lpmr4"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.330950 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q9m5\" (UniqueName: \"kubernetes.io/projected/fcc8d2d0-1c61-4f4a-95a4-1ff2f80eb7fa-kube-api-access-4q9m5\") pod \"keystone-operator-controller-manager-59d7dc95cf-prn5v\" (UID: \"fcc8d2d0-1c61-4f4a-95a4-1ff2f80eb7fa\") " pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-prn5v" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.330991 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqgmw\" (UniqueName: \"kubernetes.io/projected/3b82a357-f2a6-42e3-8d29-88368c0a3e43-kube-api-access-xqgmw\") pod \"ironic-operator-controller-manager-6f589bc7f7-t2dkk\" (UID: \"3b82a357-f2a6-42e3-8d29-88368c0a3e43\") " pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-t2dkk" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.331930 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-lpmr4" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.337136 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-87p8j" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.337307 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79f9fc9fd8-lpmr4"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.337488 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-d7vtg" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.346107 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.347349 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.351416 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-cv4td" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.354913 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8qnst" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.361095 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.365091 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.368254 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.373549 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.374327 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-hxnlg" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.374498 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.374756 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.378979 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.379484 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-w9hmb" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.385718 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.386639 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.388530 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-x5f28" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.390759 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-657c6b68c7-56wct"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.391861 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-56wct" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.393992 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-k54f5" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.401689 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.404582 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-l2n7h" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.406984 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.416071 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-657c6b68c7-56wct"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.431728 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q9m5\" (UniqueName: \"kubernetes.io/projected/fcc8d2d0-1c61-4f4a-95a4-1ff2f80eb7fa-kube-api-access-4q9m5\") pod \"keystone-operator-controller-manager-59d7dc95cf-prn5v\" (UID: \"fcc8d2d0-1c61-4f4a-95a4-1ff2f80eb7fa\") " pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-prn5v" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.431778 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m985f\" (UniqueName: \"kubernetes.io/projected/26213271-94ee-4549-99d6-b30ba62e00fc-kube-api-access-m985f\") pod \"mariadb-operator-controller-manager-67bf5bb885-vd9cw\" (UID: \"26213271-94ee-4549-99d6-b30ba62e00fc\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-vd9cw" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.431809 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqgmw\" (UniqueName: \"kubernetes.io/projected/3b82a357-f2a6-42e3-8d29-88368c0a3e43-kube-api-access-xqgmw\") pod \"ironic-operator-controller-manager-6f589bc7f7-t2dkk\" (UID: \"3b82a357-f2a6-42e3-8d29-88368c0a3e43\") " pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-t2dkk" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.431858 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc2g9\" (UniqueName: \"kubernetes.io/projected/52109482-0b58-40de-a483-cfa4b8e33eee-kube-api-access-nc2g9\") pod \"neutron-operator-controller-manager-6b96467f46-8l52m\" (UID: \"52109482-0b58-40de-a483-cfa4b8e33eee\") " pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8l52m" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.431890 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppsqd\" (UniqueName: \"kubernetes.io/projected/58ca2541-d4dc-4a6f-9e5f-0ad539e65808-kube-api-access-ppsqd\") pod \"manila-operator-controller-manager-b7cf8cb5f-mvkjc\" (UID: \"58ca2541-d4dc-4a6f-9e5f-0ad539e65808\") " pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-mvkjc" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.461764 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqgmw\" (UniqueName: \"kubernetes.io/projected/3b82a357-f2a6-42e3-8d29-88368c0a3e43-kube-api-access-xqgmw\") pod \"ironic-operator-controller-manager-6f589bc7f7-t2dkk\" (UID: \"3b82a357-f2a6-42e3-8d29-88368c0a3e43\") " pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-t2dkk" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.462149 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q9m5\" (UniqueName: 
\"kubernetes.io/projected/fcc8d2d0-1c61-4f4a-95a4-1ff2f80eb7fa-kube-api-access-4q9m5\") pod \"keystone-operator-controller-manager-59d7dc95cf-prn5v\" (UID: \"fcc8d2d0-1c61-4f4a-95a4-1ff2f80eb7fa\") " pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-prn5v" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.473856 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-t2dkk" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.495132 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.497565 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.507459 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.509304 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-mg74z" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.532371 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-prn5v" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.532747 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m985f\" (UniqueName: \"kubernetes.io/projected/26213271-94ee-4549-99d6-b30ba62e00fc-kube-api-access-m985f\") pod \"mariadb-operator-controller-manager-67bf5bb885-vd9cw\" (UID: \"26213271-94ee-4549-99d6-b30ba62e00fc\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-vd9cw" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.532801 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmbg4\" (UniqueName: \"kubernetes.io/projected/53bff1a7-c605-4e9e-8311-9157240d03b4-kube-api-access-rmbg4\") pod \"nova-operator-controller-manager-79f9fc9fd8-lpmr4\" (UID: \"53bff1a7-c605-4e9e-8311-9157240d03b4\") " pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-lpmr4" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.532846 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc2g9\" (UniqueName: \"kubernetes.io/projected/52109482-0b58-40de-a483-cfa4b8e33eee-kube-api-access-nc2g9\") pod \"neutron-operator-controller-manager-6b96467f46-8l52m\" (UID: \"52109482-0b58-40de-a483-cfa4b8e33eee\") " pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8l52m" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.532875 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b59vm\" (UniqueName: \"kubernetes.io/projected/8c02b85f-2ca3-48a6-86ad-01ce12288522-kube-api-access-b59vm\") pod \"ovn-operator-controller-manager-84c745747f-59mrh\" (UID: \"8c02b85f-2ca3-48a6-86ad-01ce12288522\") " pod="openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.532896 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-ppsqd\" (UniqueName: \"kubernetes.io/projected/58ca2541-d4dc-4a6f-9e5f-0ad539e65808-kube-api-access-ppsqd\") pod \"manila-operator-controller-manager-b7cf8cb5f-mvkjc\" (UID: \"58ca2541-d4dc-4a6f-9e5f-0ad539e65808\") " pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-mvkjc" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.532916 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5s6j7\" (UniqueName: \"kubernetes.io/projected/f2e6ebfe-0822-4070-b60c-64974705fd4e-kube-api-access-5s6j7\") pod \"placement-operator-controller-manager-598c4c8547-q6vmx\" (UID: \"f2e6ebfe-0822-4070-b60c-64974705fd4e\") " pod="openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.532931 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pszb4\" (UniqueName: \"kubernetes.io/projected/c9f201b9-c0fe-4a29-bcad-6ae49742b3ff-kube-api-access-pszb4\") pod \"octavia-operator-controller-manager-6fb7d6b8bf-h9b4s\" (UID: \"c9f201b9-c0fe-4a29-bcad-6ae49742b3ff\") " pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.532953 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ade21fb8-be1e-4bdd-a8a7-16c6d2124570-cert\") pod \"openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq\" (UID: \"ade21fb8-be1e-4bdd-a8a7-16c6d2124570\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.532980 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjfvg\" (UniqueName: \"kubernetes.io/projected/c1065d45-bf09-456a-af5c-23f62fb0780c-kube-api-access-gjfvg\") pod \"swift-operator-controller-manager-657c6b68c7-56wct\" (UID: \"c1065d45-bf09-456a-af5c-23f62fb0780c\") " pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-56wct" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.533001 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bk8f5\" (UniqueName: \"kubernetes.io/projected/ade21fb8-be1e-4bdd-a8a7-16c6d2124570-kube-api-access-bk8f5\") pod \"openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq\" (UID: \"ade21fb8-be1e-4bdd-a8a7-16c6d2124570\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.534571 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.535709 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.539750 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-mzdlh" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.557927 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.558257 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppsqd\" (UniqueName: \"kubernetes.io/projected/58ca2541-d4dc-4a6f-9e5f-0ad539e65808-kube-api-access-ppsqd\") pod \"manila-operator-controller-manager-b7cf8cb5f-mvkjc\" (UID: \"58ca2541-d4dc-4a6f-9e5f-0ad539e65808\") " pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-mvkjc" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.582733 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc2g9\" (UniqueName: \"kubernetes.io/projected/52109482-0b58-40de-a483-cfa4b8e33eee-kube-api-access-nc2g9\") pod \"neutron-operator-controller-manager-6b96467f46-8l52m\" (UID: \"52109482-0b58-40de-a483-cfa4b8e33eee\") " pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8l52m" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.615402 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75756dd4d9-ntrpm"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.616814 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m985f\" (UniqueName: \"kubernetes.io/projected/26213271-94ee-4549-99d6-b30ba62e00fc-kube-api-access-m985f\") pod \"mariadb-operator-controller-manager-67bf5bb885-vd9cw\" (UID: \"26213271-94ee-4549-99d6-b30ba62e00fc\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-vd9cw" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.617542 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-ntrpm" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.619832 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-qk52n" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.634417 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b59vm\" (UniqueName: \"kubernetes.io/projected/8c02b85f-2ca3-48a6-86ad-01ce12288522-kube-api-access-b59vm\") pod \"ovn-operator-controller-manager-84c745747f-59mrh\" (UID: \"8c02b85f-2ca3-48a6-86ad-01ce12288522\") " pod="openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.634819 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5s6j7\" (UniqueName: \"kubernetes.io/projected/f2e6ebfe-0822-4070-b60c-64974705fd4e-kube-api-access-5s6j7\") pod \"placement-operator-controller-manager-598c4c8547-q6vmx\" (UID: \"f2e6ebfe-0822-4070-b60c-64974705fd4e\") " pod="openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.634912 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pszb4\" (UniqueName: \"kubernetes.io/projected/c9f201b9-c0fe-4a29-bcad-6ae49742b3ff-kube-api-access-pszb4\") pod \"octavia-operator-controller-manager-6fb7d6b8bf-h9b4s\" (UID: \"c9f201b9-c0fe-4a29-bcad-6ae49742b3ff\") " pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.635001 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnfdg\" (UniqueName: \"kubernetes.io/projected/137c2cf3-bda8-4da6-a9c1-d430e636c745-kube-api-access-fnfdg\") pod \"telemetry-operator-controller-manager-cb66d6b59-ml9lk\" (UID: \"137c2cf3-bda8-4da6-a9c1-d430e636c745\") " pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.635084 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ade21fb8-be1e-4bdd-a8a7-16c6d2124570-cert\") pod \"openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq\" (UID: \"ade21fb8-be1e-4bdd-a8a7-16c6d2124570\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.635171 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjfvg\" (UniqueName: \"kubernetes.io/projected/c1065d45-bf09-456a-af5c-23f62fb0780c-kube-api-access-gjfvg\") pod \"swift-operator-controller-manager-657c6b68c7-56wct\" (UID: \"c1065d45-bf09-456a-af5c-23f62fb0780c\") " pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-56wct" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.635257 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bk8f5\" (UniqueName: \"kubernetes.io/projected/ade21fb8-be1e-4bdd-a8a7-16c6d2124570-kube-api-access-bk8f5\") pod \"openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq\" (UID: \"ade21fb8-be1e-4bdd-a8a7-16c6d2124570\") " 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" Sep 30 13:51:34 crc kubenswrapper[4783]: E0930 13:51:34.635175 4783 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 13:51:34 crc kubenswrapper[4783]: E0930 13:51:34.635394 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ade21fb8-be1e-4bdd-a8a7-16c6d2124570-cert podName:ade21fb8-be1e-4bdd-a8a7-16c6d2124570 nodeName:}" failed. No retries permitted until 2025-09-30 13:51:35.135375036 +0000 UTC m=+995.066841423 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ade21fb8-be1e-4bdd-a8a7-16c6d2124570-cert") pod "openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" (UID: "ade21fb8-be1e-4bdd-a8a7-16c6d2124570") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.635471 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmbg4\" (UniqueName: \"kubernetes.io/projected/53bff1a7-c605-4e9e-8311-9157240d03b4-kube-api-access-rmbg4\") pod \"nova-operator-controller-manager-79f9fc9fd8-lpmr4\" (UID: \"53bff1a7-c605-4e9e-8311-9157240d03b4\") " pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-lpmr4" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.653424 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75756dd4d9-ntrpm"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.655668 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmbg4\" (UniqueName: \"kubernetes.io/projected/53bff1a7-c605-4e9e-8311-9157240d03b4-kube-api-access-rmbg4\") pod \"nova-operator-controller-manager-79f9fc9fd8-lpmr4\" (UID: \"53bff1a7-c605-4e9e-8311-9157240d03b4\") " pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-lpmr4" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.655802 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bk8f5\" (UniqueName: \"kubernetes.io/projected/ade21fb8-be1e-4bdd-a8a7-16c6d2124570-kube-api-access-bk8f5\") pod \"openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq\" (UID: \"ade21fb8-be1e-4bdd-a8a7-16c6d2124570\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.656797 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pszb4\" (UniqueName: \"kubernetes.io/projected/c9f201b9-c0fe-4a29-bcad-6ae49742b3ff-kube-api-access-pszb4\") pod \"octavia-operator-controller-manager-6fb7d6b8bf-h9b4s\" (UID: \"c9f201b9-c0fe-4a29-bcad-6ae49742b3ff\") " pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.657083 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5s6j7\" (UniqueName: \"kubernetes.io/projected/f2e6ebfe-0822-4070-b60c-64974705fd4e-kube-api-access-5s6j7\") pod \"placement-operator-controller-manager-598c4c8547-q6vmx\" (UID: \"f2e6ebfe-0822-4070-b60c-64974705fd4e\") " pod="openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 
13:51:34.659776 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjfvg\" (UniqueName: \"kubernetes.io/projected/c1065d45-bf09-456a-af5c-23f62fb0780c-kube-api-access-gjfvg\") pod \"swift-operator-controller-manager-657c6b68c7-56wct\" (UID: \"c1065d45-bf09-456a-af5c-23f62fb0780c\") " pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-56wct" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.662305 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b59vm\" (UniqueName: \"kubernetes.io/projected/8c02b85f-2ca3-48a6-86ad-01ce12288522-kube-api-access-b59vm\") pod \"ovn-operator-controller-manager-84c745747f-59mrh\" (UID: \"8c02b85f-2ca3-48a6-86ad-01ce12288522\") " pod="openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.662511 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859cd486d-npt95"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.665283 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-56wct" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.702638 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.703784 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-mvkjc" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.719858 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-vd9cw" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.721614 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.721726 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.725950 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-hnn64" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.726118 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.736688 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d6159fd2-85d7-4d0b-8c24-042468d2cff3-cert\") pod \"infra-operator-controller-manager-7d9c7d9477-z27gn\" (UID: \"d6159fd2-85d7-4d0b-8c24-042468d2cff3\") " pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.736733 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xbsr\" (UniqueName: \"kubernetes.io/projected/719cc658-fa4f-4ae9-878a-47e8fdc9c2cc-kube-api-access-5xbsr\") pod \"test-operator-controller-manager-6bb97fcf96-nk8b4\" (UID: \"719cc658-fa4f-4ae9-878a-47e8fdc9c2cc\") " pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.736777 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfpjs\" (UniqueName: \"kubernetes.io/projected/d964b590-5ba0-4c6b-bee8-3c52f4950d9f-kube-api-access-vfpjs\") pod \"watcher-operator-controller-manager-75756dd4d9-ntrpm\" (UID: \"d964b590-5ba0-4c6b-bee8-3c52f4950d9f\") " pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-ntrpm" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.736795 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnfdg\" (UniqueName: \"kubernetes.io/projected/137c2cf3-bda8-4da6-a9c1-d430e636c745-kube-api-access-fnfdg\") pod \"telemetry-operator-controller-manager-cb66d6b59-ml9lk\" (UID: \"137c2cf3-bda8-4da6-a9c1-d430e636c745\") " pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.745694 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d6159fd2-85d7-4d0b-8c24-042468d2cff3-cert\") pod \"infra-operator-controller-manager-7d9c7d9477-z27gn\" (UID: \"d6159fd2-85d7-4d0b-8c24-042468d2cff3\") " pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.750147 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.750707 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8l52m" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.777548 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-lpmr4" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.788267 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-mms9t"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.789290 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-mms9t" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.791958 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnfdg\" (UniqueName: \"kubernetes.io/projected/137c2cf3-bda8-4da6-a9c1-d430e636c745-kube-api-access-fnfdg\") pod \"telemetry-operator-controller-manager-cb66d6b59-ml9lk\" (UID: \"137c2cf3-bda8-4da6-a9c1-d430e636c745\") " pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.792402 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-8wcgb" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.795455 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.816645 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-mms9t"] Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.833447 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.837601 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94f5v\" (UniqueName: \"kubernetes.io/projected/45485bf9-44cc-4f01-a72e-85f9efc22357-kube-api-access-94f5v\") pod \"openstack-operator-controller-manager-7b7bb8bd67-qzlt6\" (UID: \"45485bf9-44cc-4f01-a72e-85f9efc22357\") " pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.837648 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xbsr\" (UniqueName: \"kubernetes.io/projected/719cc658-fa4f-4ae9-878a-47e8fdc9c2cc-kube-api-access-5xbsr\") pod \"test-operator-controller-manager-6bb97fcf96-nk8b4\" (UID: \"719cc658-fa4f-4ae9-878a-47e8fdc9c2cc\") " pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.839624 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfpjs\" (UniqueName: \"kubernetes.io/projected/d964b590-5ba0-4c6b-bee8-3c52f4950d9f-kube-api-access-vfpjs\") pod \"watcher-operator-controller-manager-75756dd4d9-ntrpm\" (UID: \"d964b590-5ba0-4c6b-bee8-3c52f4950d9f\") " pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-ntrpm" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.839788 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/45485bf9-44cc-4f01-a72e-85f9efc22357-cert\") pod \"openstack-operator-controller-manager-7b7bb8bd67-qzlt6\" (UID: 
\"45485bf9-44cc-4f01-a72e-85f9efc22357\") " pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.861108 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xbsr\" (UniqueName: \"kubernetes.io/projected/719cc658-fa4f-4ae9-878a-47e8fdc9c2cc-kube-api-access-5xbsr\") pod \"test-operator-controller-manager-6bb97fcf96-nk8b4\" (UID: \"719cc658-fa4f-4ae9-878a-47e8fdc9c2cc\") " pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.867114 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfpjs\" (UniqueName: \"kubernetes.io/projected/d964b590-5ba0-4c6b-bee8-3c52f4950d9f-kube-api-access-vfpjs\") pod \"watcher-operator-controller-manager-75756dd4d9-ntrpm\" (UID: \"d964b590-5ba0-4c6b-bee8-3c52f4950d9f\") " pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-ntrpm" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.941140 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsvh8\" (UniqueName: \"kubernetes.io/projected/987e3b87-31ee-4a44-8829-61e239d3945a-kube-api-access-rsvh8\") pod \"rabbitmq-cluster-operator-manager-79d8469568-mms9t\" (UID: \"987e3b87-31ee-4a44-8829-61e239d3945a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-mms9t" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.941254 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94f5v\" (UniqueName: \"kubernetes.io/projected/45485bf9-44cc-4f01-a72e-85f9efc22357-kube-api-access-94f5v\") pod \"openstack-operator-controller-manager-7b7bb8bd67-qzlt6\" (UID: \"45485bf9-44cc-4f01-a72e-85f9efc22357\") " pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.941505 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/45485bf9-44cc-4f01-a72e-85f9efc22357-cert\") pod \"openstack-operator-controller-manager-7b7bb8bd67-qzlt6\" (UID: \"45485bf9-44cc-4f01-a72e-85f9efc22357\") " pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" Sep 30 13:51:34 crc kubenswrapper[4783]: E0930 13:51:34.941747 4783 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 30 13:51:34 crc kubenswrapper[4783]: E0930 13:51:34.941816 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/45485bf9-44cc-4f01-a72e-85f9efc22357-cert podName:45485bf9-44cc-4f01-a72e-85f9efc22357 nodeName:}" failed. No retries permitted until 2025-09-30 13:51:35.441784323 +0000 UTC m=+995.373250630 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/45485bf9-44cc-4f01-a72e-85f9efc22357-cert") pod "openstack-operator-controller-manager-7b7bb8bd67-qzlt6" (UID: "45485bf9-44cc-4f01-a72e-85f9efc22357") : secret "webhook-server-cert" not found Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.956989 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.962430 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94f5v\" (UniqueName: \"kubernetes.io/projected/45485bf9-44cc-4f01-a72e-85f9efc22357-kube-api-access-94f5v\") pod \"openstack-operator-controller-manager-7b7bb8bd67-qzlt6\" (UID: \"45485bf9-44cc-4f01-a72e-85f9efc22357\") " pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" Sep 30 13:51:34 crc kubenswrapper[4783]: I0930 13:51:34.993732 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk" Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.011356 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4" Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.021751 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-ntrpm" Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.042367 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsvh8\" (UniqueName: \"kubernetes.io/projected/987e3b87-31ee-4a44-8829-61e239d3945a-kube-api-access-rsvh8\") pod \"rabbitmq-cluster-operator-manager-79d8469568-mms9t\" (UID: \"987e3b87-31ee-4a44-8829-61e239d3945a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-mms9t" Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.050258 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f7f98cb69-bd2g9"] Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.069199 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsvh8\" (UniqueName: \"kubernetes.io/projected/987e3b87-31ee-4a44-8829-61e239d3945a-kube-api-access-rsvh8\") pod \"rabbitmq-cluster-operator-manager-79d8469568-mms9t\" (UID: \"987e3b87-31ee-4a44-8829-61e239d3945a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-mms9t" Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.077871 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-npt95" event={"ID":"a3064510-3c7e-4094-a3b6-ae572fba1a95","Type":"ContainerStarted","Data":"9e56f41297ea0c2dc24aee413d9d60f3232de0870314147f3ae13f38d1d61dc8"} Sep 30 13:51:35 crc kubenswrapper[4783]: W0930 13:51:35.103884 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b26411c_5b67_4660_9994_0500516afb9e.slice/crio-968d0022e8b4ead69c465324c88047146d3487be4951a6b6473fa0ac62dbf198 WatchSource:0}: Error finding container 968d0022e8b4ead69c465324c88047146d3487be4951a6b6473fa0ac62dbf198: Status 404 returned error can't find the container with id 968d0022e8b4ead69c465324c88047146d3487be4951a6b6473fa0ac62dbf198 Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.145338 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ade21fb8-be1e-4bdd-a8a7-16c6d2124570-cert\") pod \"openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq\" (UID: 
\"ade21fb8-be1e-4bdd-a8a7-16c6d2124570\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.150840 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ade21fb8-be1e-4bdd-a8a7-16c6d2124570-cert\") pod \"openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq\" (UID: \"ade21fb8-be1e-4bdd-a8a7-16c6d2124570\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.195868 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-mms9t" Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.221378 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-77fb7bcf5b-5x5rg"] Sep 30 13:51:35 crc kubenswrapper[4783]: W0930 13:51:35.241394 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c47e365_48fb_43e8_9932_04850ec2344d.slice/crio-e426c26305140fabbcd41ba4dab6fc3a298d4589be90e983d289a3036cb54540 WatchSource:0}: Error finding container e426c26305140fabbcd41ba4dab6fc3a298d4589be90e983d289a3036cb54540: Status 404 returned error can't find the container with id e426c26305140fabbcd41ba4dab6fc3a298d4589be90e983d289a3036cb54540 Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.321523 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-679b4759bb-l2n7h"] Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.331848 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8bc4775b5-d7vtg"] Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.341043 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b4fc86755-8qnst"] Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.410756 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" Sep 30 13:51:35 crc kubenswrapper[4783]: W0930 13:51:35.428461 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfcc8d2d0_1c61_4f4a_95a4_1ff2f80eb7fa.slice/crio-fbe1abe5b13355225b37e5c7d005bade6d62602f3e4e7e8bae6fd0a9c7748c32 WatchSource:0}: Error finding container fbe1abe5b13355225b37e5c7d005bade6d62602f3e4e7e8bae6fd0a9c7748c32: Status 404 returned error can't find the container with id fbe1abe5b13355225b37e5c7d005bade6d62602f3e4e7e8bae6fd0a9c7748c32 Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.428511 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-59d7dc95cf-prn5v"] Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.433108 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6f589bc7f7-t2dkk"] Sep 30 13:51:35 crc kubenswrapper[4783]: W0930 13:51:35.437682 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b82a357_f2a6_42e3_8d29_88368c0a3e43.slice/crio-b7341928bed13d8f79358aac6beb4249db9ad2e82cf61935801cde7e133e73ec WatchSource:0}: Error finding container b7341928bed13d8f79358aac6beb4249db9ad2e82cf61935801cde7e133e73ec: Status 404 returned error can't find the container with id b7341928bed13d8f79358aac6beb4249db9ad2e82cf61935801cde7e133e73ec Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.451589 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/45485bf9-44cc-4f01-a72e-85f9efc22357-cert\") pod \"openstack-operator-controller-manager-7b7bb8bd67-qzlt6\" (UID: \"45485bf9-44cc-4f01-a72e-85f9efc22357\") " pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" Sep 30 13:51:35 crc kubenswrapper[4783]: E0930 13:51:35.451757 4783 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 30 13:51:35 crc kubenswrapper[4783]: E0930 13:51:35.451846 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/45485bf9-44cc-4f01-a72e-85f9efc22357-cert podName:45485bf9-44cc-4f01-a72e-85f9efc22357 nodeName:}" failed. No retries permitted until 2025-09-30 13:51:36.451825192 +0000 UTC m=+996.383291499 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/45485bf9-44cc-4f01-a72e-85f9efc22357-cert") pod "openstack-operator-controller-manager-7b7bb8bd67-qzlt6" (UID: "45485bf9-44cc-4f01-a72e-85f9efc22357") : secret "webhook-server-cert" not found Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.680961 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-657c6b68c7-56wct"] Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.693137 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79f9fc9fd8-lpmr4"] Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.704144 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf5bb885-vd9cw"] Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.713628 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6b96467f46-8l52m"] Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.730441 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-b7cf8cb5f-mvkjc"] Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.743570 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn"] Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.747648 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s"] Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.751924 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh"] Sep 30 13:51:35 crc kubenswrapper[4783]: E0930 13:51:35.752153 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pszb4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-6fb7d6b8bf-h9b4s_openstack-operators(c9f201b9-c0fe-4a29-bcad-6ae49742b3ff): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 13:51:35 crc kubenswrapper[4783]: E0930 13:51:35.754975 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b59vm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-84c745747f-59mrh_openstack-operators(8c02b85f-2ca3-48a6-86ad-01ce12288522): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.813251 4783 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4"] Sep 30 13:51:35 crc kubenswrapper[4783]: E0930 13:51:35.819957 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5xbsr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-6bb97fcf96-nk8b4_openstack-operators(719cc658-fa4f-4ae9-878a-47e8fdc9c2cc): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.826020 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq"] Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.838179 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx"] Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.847577 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk"] Sep 30 13:51:35 crc kubenswrapper[4783]: E0930 13:51:35.850503 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent@sha256:89f9e06c633ae852be8d3e3ca581def0a6e9a5b38c0d519f656976c7414b6b97,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner@sha256:56f155abc1b8734e4a79c7306ba38caf8d2881625f37d2f9c5a5763fa4db7e02,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api@sha256:29c8cd4f2d853f512e2ecd44f522f28c3aac046a72733365aa5e91667041d62e,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator@sha256:ed896681f0d9720f56bbcb0b7a4f3626ed397e89af919604ca68b42b7b598859,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener@sha256:712e1c932a90ef5e3c3ee5d5aea591a377da8c4af604ebd8ec399869a61dfbef,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier@sha256:10fd8489a5bf6f1d781e9226de68356132db78b62269e69d632748cb08fae725,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24@sha256:e91d58021b54c46883595ff66be65882de54abdb3be2ca53c4162b20d18b5f48,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:73fd28af83ea96cc920d26dba6105ee59f0824234527949884e6ca55b71d7533,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener@sha256:8b3a90516ba0695cf3198a7b101da770c30c8100cb79f8088b5729e6a50ddd6d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker@sha256:6d42bcf65422d2de9cd807feb3e8b005de10084b4b8eb340c8a9045644ae7aaa,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:32a25ac44706b73bff04a89514177b1efd675f0442b295e225f0020555ca6350,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute@sha256:b19043eac7c653e00da8da9418ae378fdd29698adb1adb4bf5ae7cfc03ba5538,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi@sha256:c486e00b36ea7698d6a4cd9048a759bad5a8286e4949bbd1f82c3ddb70600b9b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter@sha256:7211a617ec657701ca819aa0ba28e1d5750f5bf2c1391b755cc4a48cc360b0fa,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/
podified-antelope-centos9/openstack-ceilometer-notification@sha256:ef2727f0300fbf3bf15d8ddc409d0fd63e4aac9dd64c86459bd6ff64fc6b9534,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core@sha256:09b5017c95d7697e66b9c64846bc48ef5826a009cba89b956ec54561e5f4a2d1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:e318869f706836a0c74c0ad55aab277b1bb7fae0555ae0f03cb28b379b9ce695,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup@sha256:329aac65ba00c3cf43bb1d5fac8818752f01de90b47719e2a84db4e2fe083292,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler@sha256:6ce73885ac1ee7c69468efc448eff5deae46502812c5e3d099f771e1cc03345f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume@sha256:282cc0fcdbb8a688dd62a2499480aae4a36b620f2160d51e6c8269e6cc32d5fc,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api@sha256:d98c0c9d3bdd84daf4b98d45b8bbe2e67a633491897dda7167664a5fa1f0f26e,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9@sha256:4ad1d36fe1c8992e43910fc2d566b991fd73f9b82b1ab860c66858448ff82c00,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central@sha256:92789eab1b8a91807a5e898cb63478d125ae539eafe63c96049100c6ddeadb04,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns@sha256:ee9832268e0df5d62c50c5ce171e9ef72a035aa74c718cfbf482e34426d8d15e,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer@sha256:07b4f96f24f32224c13613f85173f9fcc3092b8797ffa47519403d124bfe4c15,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound@sha256:3a873c95bcb7ae8bd24ff1eb5fe89ac5272a41a3345a7b41d55419b5d66b70e7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker@sha256:388dbae2f1aae2720e919cc24d10cd577b73b4e4ef7abdc34287bcb8d27ff98f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr@sha256:d4c1b2496868da3dcca9f4bda0834fcc58d23c21d8ce3c42a68205d02039c487,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid@sha256:c4414cc2680fb1bacbf99261f759f4ef7401fb2e4953140270bffdab8e002f22,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler@sha256:581b65b646301e0fcb07582150ba63438f1353a85bf9acf1eb2acb4ce71c58bd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron@sha256:b9b950a656f1456b3143872c492b0987bf4a9e23bc7c59d843cf50099667b368,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Valu
e:quay.io/podified-antelope-centos9/openstack-multipathd@sha256:afd5d6822b86ea0930b2011fede834bb24495995d7baac03363ab61d89f07a22,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent@sha256:665d7a25dfc959ec5448d5ba6b430792ebde1be1580ea6809e9b3b4f94184b3f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn@sha256:499c6d82390ee2dbb91628d2e42671406372fb603d697685a04145cf6dd8d0ab,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent@sha256:da2736bc98bfe340e86234523d4c00220f6f79add271900981cf4ad9f4c5ee51,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent@sha256:4df8dad8a5fb4805a0424cbc0b8df666b9a06b76c64f26e186f3b9e8efe6cd95,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter@sha256:39c642b2b337e38c18e80266fb14383754178202f40103646337722a594d984c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent@sha256:65c16453b5b7bb113646ffce0be26138e89eecbf6dd1582cdfe76af7f5dc62cf,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter@sha256:d339ba049bbd1adccb795962bf163f5b22fd84dea865d88b9eb525e46247d6bd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:bdfed2a176a064bf70082602a1f319eace2d9003ff1117b1e48b7f2130840070,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api@sha256:ce968dce2209ec5114772b4b73ed16c0a25988637372f2afbfac080cc6f1e378,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn@sha256:b7823eaacf55280cdf3f1bede4f40bf49fdbf9ba9f3f5ba64b0abedede601c8f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine@sha256:605206d967ffaa20156eb07a645654cd3e0f880bb0eefbb2b5e1e749b169f148,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon@sha256:9470db6caf5102cf37ddb1f137f17b05ef7119f174f4189beb4839ef7f65730c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached@sha256:34e84da4ae7e5d65931cbefcda84fd8fdc93271ec466adf1a9040b67a3af176a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis@sha256:b301b17c31e47733a8a232773427ce3cb50433a3aa09d4a5bd998b1aeb5e5530,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api@sha256:d642c35c0f9d3acf31987c028f1d4d4fdf7b49e1d6cbcd73268c12b3d6e14b86,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor@sha256:922eb0799ab36a91aa95abe52565dc60db807457dbf8c651b30e06b9e8aebcd4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE
_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector@sha256:cd01e9605ab513458a6813e38d37fbfde1a91388cc5c00962203dbcbdc285e79,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent@sha256:dd35c22b17730cbca8547ea98459f182939462c8dc3465d21335a377018937de,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe@sha256:0e0e2e48a41d5417f1d6a4407e63d443611b7eacd66e27f561c9eedf3e5a66c5,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent@sha256:735bd24219fdb5f21c31313a5bc685364f45c004fb5e8af634984c147060d4e4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone@sha256:35b5554efae34f2c25a2d274c78bdaecf3d4ce949fa61c692835ee54cdfc6d74,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api@sha256:01b93ab0d87482b9a1fd46706771974743dea1ca74f5fcc3de4a560f7cfc033b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler@sha256:87471fbe3ba77b7115096f4fef8f5a9e1468cbd5bf6060c09785a60f9107a717,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share@sha256:947dcc46173064939cba252d5db34eb6ddd05eb0af7afd762beebe77e9a72c6e,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:8498ed720d02ce4e7045f7eb0051b138274cddba9b1e443d11e413da3474d3a3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils@sha256:2cb054830655a6af5fc6848360618676d24fd9cf15078c0b9855e09d05733eec,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api@sha256:0f5f8f560cd3b4951f7e8e67ef570575435b4c6915658cbb66f32a201776078b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute@sha256:7055e8d7b7d72ce697c6077be14c525c019d186002f04765b90a14c82e01cc7c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor@sha256:d2cd7a21461b4b569d93a63d57761f437cf6bd0847d69a3a65f64d400c7cca6d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy@sha256:432c0c6f36a5e4e4db394771f7dc72f3bf9e5060dc4220f781d3c5050cc17f0d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler@sha256:3ff379a74cc15352bfa25605dbb1a5f4250620e8364bf87ed2f3d5c17e6a8b26,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/
podified-antelope-centos9/openstack-octavia-api@sha256:c67a7bba2fc9351c302369b590473a737bab20d0982d227756fe1fa0bc1c8773,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager@sha256:50c613d159667a26ba4bfb7aebf157b8db8919c815a866438b1d2700231a508e,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping@sha256:f3d3d7a7c83926a09714199406bfe8070e6be5055cbfbf00aa37f47e1e5e9bc9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog@sha256:e9b3260907b0e417bb779a7d513a2639734cbbf792e77c61e05e760d06978f4a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker@sha256:1aa6a76e67f2d91ee45472741238b5d4ab53f9bcb94db678c7ae92e1af28899d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient@sha256:80b8547cf5821a4eb5461d1ac14edbc700ef03926268af960bf511647de027af,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_LIGHTSPEED_IMAGE_URL_DEFAULT,Value:quay.io/openstack-lightspeed/rag-content@sha256:7086442096db5ceb68e22bcce00688072957fdad07d00d8f18eb0506ad958923,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather@sha256:bf42dfd2e225818662aa28c4bb23204dc47b2b91127ca0e49b085baa1ea7609d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter@sha256:ecd56e6733c475f2d441344fd98f288c3eac0261ba113695fec7520a954ccbc7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi@sha256:bd08ffdb4dcfd436200d846d15b2bdcc14122fa43adfea4c0980a087a18f9e3e,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller@sha256:2d1e733d24df6ca02636374147f801a0ec1509f8db2f9ad8c739b3f2341815fd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base@sha256:c08ba2a0df4cc18e615b25c329e9c74153709b435c032c38502ec78ba297c5fb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server@sha256:b6cdafc7722def5b63ef4f00251e10aca93ef82628b21e88925c3d4b49277316,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd@sha256:0a0bbe43e3c266dfeb40a09036f76393dc70377b636724c130a29c434f6d6c82,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server@sha256:7387b628d7cfb3ff349e0df6f11f41ae7fdb0e2d55844944896af02a81ac7cf7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api@sha256:9a3671dee1752ebe3639a0b16de95d29e779f1629d563e0585d65b9792542fc9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:37cc031749b113c35231066ce9f8ce7ccc83e21808ba92ea1981e72bbc42e80f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACC
OUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account@sha256:b2782fe02b1438d68308a5847b0628f0971b5bb8bb0a4d20fe15176fa75bd33f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container@sha256:7118cc3a695fead2a8bab14c8ace018ed7a5ba23ef347bf4ead44219e8467866,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object@sha256:793a836e17b07b0e0a4e8d3177fd04724e1e058fca275ef434abe60a2e444a79,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server@sha256:713d74dc81859344bdcae68a9f7a954146c3e68cfa819518a58cce9e896298c8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all@sha256:e39be536015777a1b0df8ac863f354046b2b15fee8482abd37d2fa59d8074208,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api@sha256:28e209c66bc86354495ac7793f2e66db0e8540485590742ab1b53a7cf24cb4fa,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier@sha256:d117753b6cff563084bf771173ea89a2ce00854efdc45447667e5d230c60c363,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine@sha256:f1aac0a57d83b085c37cf75ce0a56f85b68353b1a88740b64a5858bc93dba36b,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bk8f5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq_openstack-operators(ade21fb8-be1e-4bdd-a8a7-16c6d2124570): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 13:51:35 crc kubenswrapper[4783]: W0930 13:51:35.863141 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf2e6ebfe_0822_4070_b60c_64974705fd4e.slice/crio-31dd4128f5c289c18fb3efc510d24090b14c5630b1f9662d64c48fdde9e8b9b6 WatchSource:0}: Error finding container 31dd4128f5c289c18fb3efc510d24090b14c5630b1f9662d64c48fdde9e8b9b6: Status 404 returned error can't find the container with id 31dd4128f5c289c18fb3efc510d24090b14c5630b1f9662d64c48fdde9e8b9b6 Sep 30 13:51:35 crc kubenswrapper[4783]: W0930 13:51:35.863584 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod137c2cf3_bda8_4da6_a9c1_d430e636c745.slice/crio-c1e1737d90c0546ecbf79ebb70d7adbdc3d8442309d471e8e1b3adc4526aa0bb WatchSource:0}: Error finding container c1e1737d90c0546ecbf79ebb70d7adbdc3d8442309d471e8e1b3adc4526aa0bb: Status 404 returned error can't find the container with id c1e1737d90c0546ecbf79ebb70d7adbdc3d8442309d471e8e1b3adc4526aa0bb Sep 30 13:51:35 crc kubenswrapper[4783]: E0930 13:51:35.866319 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5s6j7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-598c4c8547-q6vmx_openstack-operators(f2e6ebfe-0822-4070-b60c-64974705fd4e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 13:51:35 crc kubenswrapper[4783]: E0930 13:51:35.875870 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fnfdg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-cb66d6b59-ml9lk_openstack-operators(137c2cf3-bda8-4da6-a9c1-d430e636c745): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.882899 4783 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75756dd4d9-ntrpm"] Sep 30 13:51:35 crc kubenswrapper[4783]: W0930 13:51:35.899010 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd964b590_5ba0_4c6b_bee8_3c52f4950d9f.slice/crio-432619166cb2758657c3ea9f025868d196e0399a2ad4772a809cd1e6a248c677 WatchSource:0}: Error finding container 432619166cb2758657c3ea9f025868d196e0399a2ad4772a809cd1e6a248c677: Status 404 returned error can't find the container with id 432619166cb2758657c3ea9f025868d196e0399a2ad4772a809cd1e6a248c677 Sep 30 13:51:35 crc kubenswrapper[4783]: I0930 13:51:35.901818 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-mms9t"] Sep 30 13:51:35 crc kubenswrapper[4783]: E0930 13:51:35.913973 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rsvh8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-79d8469568-mms9t_openstack-operators(987e3b87-31ee-4a44-8829-61e239d3945a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 13:51:35 crc kubenswrapper[4783]: E0930 13:51:35.916294 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-mms9t" podUID="987e3b87-31ee-4a44-8829-61e239d3945a" Sep 30 13:51:35 crc kubenswrapper[4783]: E0930 13:51:35.965937 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" 
pod="openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh" podUID="8c02b85f-2ca3-48a6-86ad-01ce12288522" Sep 30 13:51:35 crc kubenswrapper[4783]: E0930 13:51:35.971116 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s" podUID="c9f201b9-c0fe-4a29-bcad-6ae49742b3ff" Sep 30 13:51:36 crc kubenswrapper[4783]: E0930 13:51:36.038528 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx" podUID="f2e6ebfe-0822-4070-b60c-64974705fd4e" Sep 30 13:51:36 crc kubenswrapper[4783]: E0930 13:51:36.054826 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk" podUID="137c2cf3-bda8-4da6-a9c1-d430e636c745" Sep 30 13:51:36 crc kubenswrapper[4783]: E0930 13:51:36.056362 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4" podUID="719cc658-fa4f-4ae9-878a-47e8fdc9c2cc" Sep 30 13:51:36 crc kubenswrapper[4783]: E0930 13:51:36.067497 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" podUID="ade21fb8-be1e-4bdd-a8a7-16c6d2124570" Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.095574 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-mms9t" event={"ID":"987e3b87-31ee-4a44-8829-61e239d3945a","Type":"ContainerStarted","Data":"4e760a304b6a5ed0d856498ffb9672d731ddc245d2d8f1277e5afad1053fd2be"} Sep 30 13:51:36 crc kubenswrapper[4783]: E0930 13:51:36.097104 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-mms9t" podUID="987e3b87-31ee-4a44-8829-61e239d3945a" Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.097612 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-lpmr4" event={"ID":"53bff1a7-c605-4e9e-8311-9157240d03b4","Type":"ContainerStarted","Data":"15d5022303ae8f0f6e323eebb9650033a1da9de2e5895ad1e6bdf84aa2c6233d"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.099770 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-5x5rg" event={"ID":"0c47e365-48fb-43e8-9932-04850ec2344d","Type":"ContainerStarted","Data":"e426c26305140fabbcd41ba4dab6fc3a298d4589be90e983d289a3036cb54540"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.101541 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-vd9cw" 
event={"ID":"26213271-94ee-4549-99d6-b30ba62e00fc","Type":"ContainerStarted","Data":"ec0826e8888027dd00ef12b5c20f9eef4a088852c86c959b101c2f36fd5c7970"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.102883 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-l2n7h" event={"ID":"58b3d85e-a497-4e26-98ab-89101226c62a","Type":"ContainerStarted","Data":"f820058d942985c4dcb7cffda18a031d4bb0e85db78fe9bef824c332422252a6"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.104896 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-bd2g9" event={"ID":"3b26411c-5b67-4660-9994-0500516afb9e","Type":"ContainerStarted","Data":"968d0022e8b4ead69c465324c88047146d3487be4951a6b6473fa0ac62dbf198"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.107252 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh" event={"ID":"8c02b85f-2ca3-48a6-86ad-01ce12288522","Type":"ContainerStarted","Data":"4f0be4f58baca97612aff9a548685c8998f1f34e1a4527c5a7d5b55e70955148"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.107285 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh" event={"ID":"8c02b85f-2ca3-48a6-86ad-01ce12288522","Type":"ContainerStarted","Data":"40c849e88551fbe433978e8dee52d6d282ab922b1e5657b803e49231108edcac"} Sep 30 13:51:36 crc kubenswrapper[4783]: E0930 13:51:36.109047 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh" podUID="8c02b85f-2ca3-48a6-86ad-01ce12288522" Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.122909 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" event={"ID":"ade21fb8-be1e-4bdd-a8a7-16c6d2124570","Type":"ContainerStarted","Data":"8bef3352fae22fa8411a810d51a72a409145ae2f3394d1c6edf9d83b85bd3d6a"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.122954 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" event={"ID":"ade21fb8-be1e-4bdd-a8a7-16c6d2124570","Type":"ContainerStarted","Data":"a7c196a95f5e356148d9d1c81d587a987fa7d8ac8e10f5978c869f6548030b7c"} Sep 30 13:51:36 crc kubenswrapper[4783]: E0930 13:51:36.127899 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" podUID="ade21fb8-be1e-4bdd-a8a7-16c6d2124570" Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.129597 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-mvkjc" 
event={"ID":"58ca2541-d4dc-4a6f-9e5f-0ad539e65808","Type":"ContainerStarted","Data":"0e6f7cb4158a29af4685d1feed4f4f74540eb578f2f6850dde7c361e97aa7dff"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.135451 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx" event={"ID":"f2e6ebfe-0822-4070-b60c-64974705fd4e","Type":"ContainerStarted","Data":"a82d688d1053e4fb4528aef741e7dd307a00c6b4e3ec93732860071e7693520f"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.135491 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx" event={"ID":"f2e6ebfe-0822-4070-b60c-64974705fd4e","Type":"ContainerStarted","Data":"31dd4128f5c289c18fb3efc510d24090b14c5630b1f9662d64c48fdde9e8b9b6"} Sep 30 13:51:36 crc kubenswrapper[4783]: E0930 13:51:36.136939 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx" podUID="f2e6ebfe-0822-4070-b60c-64974705fd4e" Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.137568 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-d7vtg" event={"ID":"30aabf9c-e0d4-44ac-ae33-b5f7784941ce","Type":"ContainerStarted","Data":"645c2baf219959d4ac0b50d97532f462dcdea74baebb2b30c4a5ee8c11a24330"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.138345 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8qnst" event={"ID":"60247f30-05f5-49e6-81f8-7a91203afa8e","Type":"ContainerStarted","Data":"dae414b5cd7095e95a96c166b593c963739e11636d271ed1a12fa1a193689832"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.140462 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" event={"ID":"d6159fd2-85d7-4d0b-8c24-042468d2cff3","Type":"ContainerStarted","Data":"ca102f027785e68fb36ed5f576207d8d47fc6ca0f82d55f72672549b50914b41"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.145444 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-56wct" event={"ID":"c1065d45-bf09-456a-af5c-23f62fb0780c","Type":"ContainerStarted","Data":"4f248061143e3c9f3f956c3a997895d5b6278be300efc57b2b0c7b2d7728120f"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.158150 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-prn5v" event={"ID":"fcc8d2d0-1c61-4f4a-95a4-1ff2f80eb7fa","Type":"ContainerStarted","Data":"fbe1abe5b13355225b37e5c7d005bade6d62602f3e4e7e8bae6fd0a9c7748c32"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.160238 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8l52m" event={"ID":"52109482-0b58-40de-a483-cfa4b8e33eee","Type":"ContainerStarted","Data":"c36fae1f6d8df738adceb3a6c1259de1aaa52a165297e86dff6077777ed0b8dd"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.162154 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-ntrpm" event={"ID":"d964b590-5ba0-4c6b-bee8-3c52f4950d9f","Type":"ContainerStarted","Data":"432619166cb2758657c3ea9f025868d196e0399a2ad4772a809cd1e6a248c677"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.178246 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4" event={"ID":"719cc658-fa4f-4ae9-878a-47e8fdc9c2cc","Type":"ContainerStarted","Data":"82f1bacf98eceaf0f958993776ecdbc7589fe683af9960224da5e1d30b1ec4b6"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.178292 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4" event={"ID":"719cc658-fa4f-4ae9-878a-47e8fdc9c2cc","Type":"ContainerStarted","Data":"1866b801642c7adca172322a9d520a21ff3177c735304474bef1dc2bf5ecdddc"} Sep 30 13:51:36 crc kubenswrapper[4783]: E0930 13:51:36.179603 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4" podUID="719cc658-fa4f-4ae9-878a-47e8fdc9c2cc" Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.199109 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-t2dkk" event={"ID":"3b82a357-f2a6-42e3-8d29-88368c0a3e43","Type":"ContainerStarted","Data":"b7341928bed13d8f79358aac6beb4249db9ad2e82cf61935801cde7e133e73ec"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.201421 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk" event={"ID":"137c2cf3-bda8-4da6-a9c1-d430e636c745","Type":"ContainerStarted","Data":"602a4306d183f9a5892d826eae2dd6629e1d329d832fded32236a7d71bce247a"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.201455 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk" event={"ID":"137c2cf3-bda8-4da6-a9c1-d430e636c745","Type":"ContainerStarted","Data":"c1e1737d90c0546ecbf79ebb70d7adbdc3d8442309d471e8e1b3adc4526aa0bb"} Sep 30 13:51:36 crc kubenswrapper[4783]: E0930 13:51:36.203559 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk" podUID="137c2cf3-bda8-4da6-a9c1-d430e636c745" Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.203781 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s" event={"ID":"c9f201b9-c0fe-4a29-bcad-6ae49742b3ff","Type":"ContainerStarted","Data":"0b1cf1e2d2072feb5894aa2a1c9527b6a84bf8b799493fe3c4f9a666b052833e"} Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.203805 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s" 
event={"ID":"c9f201b9-c0fe-4a29-bcad-6ae49742b3ff","Type":"ContainerStarted","Data":"7758ef83a4ee1acdc3a98cf23008fb90844a60cd241d5f22c9068203d1e20a5c"} Sep 30 13:51:36 crc kubenswrapper[4783]: E0930 13:51:36.204893 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s" podUID="c9f201b9-c0fe-4a29-bcad-6ae49742b3ff" Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.477986 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/45485bf9-44cc-4f01-a72e-85f9efc22357-cert\") pod \"openstack-operator-controller-manager-7b7bb8bd67-qzlt6\" (UID: \"45485bf9-44cc-4f01-a72e-85f9efc22357\") " pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.504592 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/45485bf9-44cc-4f01-a72e-85f9efc22357-cert\") pod \"openstack-operator-controller-manager-7b7bb8bd67-qzlt6\" (UID: \"45485bf9-44cc-4f01-a72e-85f9efc22357\") " pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.637486 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" Sep 30 13:51:36 crc kubenswrapper[4783]: I0930 13:51:36.947663 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6"] Sep 30 13:51:37 crc kubenswrapper[4783]: I0930 13:51:37.212398 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" event={"ID":"45485bf9-44cc-4f01-a72e-85f9efc22357","Type":"ContainerStarted","Data":"f7fbed8a5ed03279a522f711cdeb4ace40ced99b1d45f9c483bf68cba6ab73cf"} Sep 30 13:51:37 crc kubenswrapper[4783]: E0930 13:51:37.215433 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx" podUID="f2e6ebfe-0822-4070-b60c-64974705fd4e" Sep 30 13:51:37 crc kubenswrapper[4783]: E0930 13:51:37.216017 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4" podUID="719cc658-fa4f-4ae9-878a-47e8fdc9c2cc" Sep 30 13:51:37 crc kubenswrapper[4783]: E0930 13:51:37.216098 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" 
pod="openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh" podUID="8c02b85f-2ca3-48a6-86ad-01ce12288522" Sep 30 13:51:37 crc kubenswrapper[4783]: E0930 13:51:37.216166 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" podUID="ade21fb8-be1e-4bdd-a8a7-16c6d2124570" Sep 30 13:51:37 crc kubenswrapper[4783]: E0930 13:51:37.216504 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-mms9t" podUID="987e3b87-31ee-4a44-8829-61e239d3945a" Sep 30 13:51:37 crc kubenswrapper[4783]: E0930 13:51:37.216561 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk" podUID="137c2cf3-bda8-4da6-a9c1-d430e636c745" Sep 30 13:51:37 crc kubenswrapper[4783]: E0930 13:51:37.216627 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s" podUID="c9f201b9-c0fe-4a29-bcad-6ae49742b3ff" Sep 30 13:51:37 crc kubenswrapper[4783]: I0930 13:51:37.676378 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:51:37 crc kubenswrapper[4783]: I0930 13:51:37.676443 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:51:38 crc kubenswrapper[4783]: I0930 13:51:38.225375 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" event={"ID":"45485bf9-44cc-4f01-a72e-85f9efc22357","Type":"ContainerStarted","Data":"c26ccda068c3889218f061bb48eead34d54338313f527a1bb02413d7abd0321c"} Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 13:51:45.284255 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8l52m" event={"ID":"52109482-0b58-40de-a483-cfa4b8e33eee","Type":"ContainerStarted","Data":"74cbec4263c91dc45fc498e19e493f853524b78dfe72618bb08a16ed1a3abff4"} Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 
13:51:45.293950 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-5x5rg" event={"ID":"0c47e365-48fb-43e8-9932-04850ec2344d","Type":"ContainerStarted","Data":"f91ce8ef8559cf0f0ae1e79a985f1b6e4d1a755c2c85eca56f52596fdcf6c98c"} Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 13:51:45.304713 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-ntrpm" event={"ID":"d964b590-5ba0-4c6b-bee8-3c52f4950d9f","Type":"ContainerStarted","Data":"253f64c1589affdaf63301ad74f9365347290d39dc4aa3f53230be5666a8726b"} Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 13:51:45.310450 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-56wct" event={"ID":"c1065d45-bf09-456a-af5c-23f62fb0780c","Type":"ContainerStarted","Data":"5c8b14a2db80be000bdfe4aa6dc318951e7a1a1b92e54a6d56ca2de08705a750"} Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 13:51:45.319851 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-npt95" event={"ID":"a3064510-3c7e-4094-a3b6-ae572fba1a95","Type":"ContainerStarted","Data":"3b4b3d9f12d84bad1ea286549f592c504a3ddc42f6a48515c82c1cbde6176ab6"} Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 13:51:45.327833 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-d7vtg" event={"ID":"30aabf9c-e0d4-44ac-ae33-b5f7784941ce","Type":"ContainerStarted","Data":"a1122c453245b922c783107bfe0016bb0e79f7dbef9de7780c2977b72409362d"} Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 13:51:45.342756 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8qnst" event={"ID":"60247f30-05f5-49e6-81f8-7a91203afa8e","Type":"ContainerStarted","Data":"340e4b30d3c90725fc1f25ed133c27a24e24978551090b7de61c000b03f77264"} Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 13:51:45.366720 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-bd2g9" event={"ID":"3b26411c-5b67-4660-9994-0500516afb9e","Type":"ContainerStarted","Data":"665af9c6f4ff238755aa55f227f9d85b8a9afd2051a29ca4004bd57f26ce3729"} Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 13:51:45.376445 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" event={"ID":"45485bf9-44cc-4f01-a72e-85f9efc22357","Type":"ContainerStarted","Data":"a693f58406f6ef89f752fc714f25f9718f3974ae6ba5fbd6619aeecd6ce31748"} Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 13:51:45.376890 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 13:51:45.386602 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 13:51:45.390909 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-lpmr4" event={"ID":"53bff1a7-c605-4e9e-8311-9157240d03b4","Type":"ContainerStarted","Data":"353f78a211a1cef55bf27983685049da1f92c796b7c09556d40088caf0287fcf"} Sep 30 13:51:45 crc 
kubenswrapper[4783]: I0930 13:51:45.394278 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" event={"ID":"d6159fd2-85d7-4d0b-8c24-042468d2cff3","Type":"ContainerStarted","Data":"f3e4e95748c248886a06b9fb027af705172aeee1b6bd2358445b8047c5de8d51"} Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 13:51:45.412808 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-prn5v" event={"ID":"fcc8d2d0-1c61-4f4a-95a4-1ff2f80eb7fa","Type":"ContainerStarted","Data":"5aee623711e33b0991d0b39c3464262b81b6e615b5762705ba43c5730fb4016d"} Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 13:51:45.417114 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-l2n7h" event={"ID":"58b3d85e-a497-4e26-98ab-89101226c62a","Type":"ContainerStarted","Data":"aeda14fa1f82998e865bc5c51862201dc59b0f4c77d368643e516ec01825b6f1"} Sep 30 13:51:45 crc kubenswrapper[4783]: I0930 13:51:45.445710 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7b7bb8bd67-qzlt6" podStartSLOduration=11.445695903 podStartE2EDuration="11.445695903s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:51:45.444707731 +0000 UTC m=+1005.376174048" watchObservedRunningTime="2025-09-30 13:51:45.445695903 +0000 UTC m=+1005.377162210" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.432947 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-l2n7h" event={"ID":"58b3d85e-a497-4e26-98ab-89101226c62a","Type":"ContainerStarted","Data":"0a60cf1f0d907f2292a81417f98f1f619249e736101027c8fd35be64b59fd188"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.433108 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-l2n7h" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.435053 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-56wct" event={"ID":"c1065d45-bf09-456a-af5c-23f62fb0780c","Type":"ContainerStarted","Data":"c735ea4be60341c78622c76914ae5dda12dd95f8e5163cad98d63276d1a6f1e7"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.435708 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-56wct" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.437846 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-npt95" event={"ID":"a3064510-3c7e-4094-a3b6-ae572fba1a95","Type":"ContainerStarted","Data":"224fd6de0075c2db92b672376bdaf4358c8002619b67c528713773012d478488"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.437917 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-npt95" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.439333 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8l52m" 
event={"ID":"52109482-0b58-40de-a483-cfa4b8e33eee","Type":"ContainerStarted","Data":"13ad8ea275ceddd705af974a95cce422ea296b0ba85679139ac7d4a4a28e110b"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.439480 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8l52m" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.441202 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-vd9cw" event={"ID":"26213271-94ee-4549-99d6-b30ba62e00fc","Type":"ContainerStarted","Data":"a65b85606c5cc11577d552fe5b6056375814abc096729cdd280c5419ff26341a"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.441470 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-vd9cw" event={"ID":"26213271-94ee-4549-99d6-b30ba62e00fc","Type":"ContainerStarted","Data":"79ce084a8ca3c2ba3e5504eec7ecf3c324a04ababecfb3d53d4670327cf9b5b7"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.442052 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-vd9cw" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.443631 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-t2dkk" event={"ID":"3b82a357-f2a6-42e3-8d29-88368c0a3e43","Type":"ContainerStarted","Data":"302587d3c849fd2ef6b9849fdeddabba718b8a7fa8ad7bd12a2782634b6c52e3"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.443710 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-t2dkk" event={"ID":"3b82a357-f2a6-42e3-8d29-88368c0a3e43","Type":"ContainerStarted","Data":"d48d2404df7b6fe55881fdc3dab69162a5e46d4b451c0932b9d3f78b5223fc47"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.444155 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-t2dkk" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.445699 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-lpmr4" event={"ID":"53bff1a7-c605-4e9e-8311-9157240d03b4","Type":"ContainerStarted","Data":"400540fc73481d3a2051d14bf7e0186372d784f0be1f4fa72222da1818e1fd0d"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.446420 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-lpmr4" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.447712 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-5x5rg" event={"ID":"0c47e365-48fb-43e8-9932-04850ec2344d","Type":"ContainerStarted","Data":"ac9674407b4fd93f23937f9517fa13a77e63feda80acae35a8dc70c30705149a"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.448331 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-5x5rg" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.453304 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-ntrpm" 
event={"ID":"d964b590-5ba0-4c6b-bee8-3c52f4950d9f","Type":"ContainerStarted","Data":"5758b2c76bdf25165ad4db67d86d619f11d4a6599820032013c324f90cab1569"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.453672 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-ntrpm" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.455435 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8qnst" event={"ID":"60247f30-05f5-49e6-81f8-7a91203afa8e","Type":"ContainerStarted","Data":"1a15d5248fb4281b2a2c6227eadf09d97c2a5a28650b9f69652d63fbdb706e5f"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.455656 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8qnst" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.457323 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" event={"ID":"d6159fd2-85d7-4d0b-8c24-042468d2cff3","Type":"ContainerStarted","Data":"c5f21ed2efc91d60db651d5328830aa8401fb1b64e6dbccb00f7ba4511936654"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.460488 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-mvkjc" event={"ID":"58ca2541-d4dc-4a6f-9e5f-0ad539e65808","Type":"ContainerStarted","Data":"486edee4228bb99dbc44652ae0545e64ab868c60dd472a960ae6c295cfc0daa7"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.460656 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-mvkjc" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.460741 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-mvkjc" event={"ID":"58ca2541-d4dc-4a6f-9e5f-0ad539e65808","Type":"ContainerStarted","Data":"678cdc2b03b6269d30c5a6f2040dfb0494ebb42f1f9e3f7742c9917baa20140f"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.462173 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-d7vtg" event={"ID":"30aabf9c-e0d4-44ac-ae33-b5f7784941ce","Type":"ContainerStarted","Data":"be5c730fce3e43b611b54bd8691715ea34f26a29b987f4282710b6a03f875309"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.462368 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-d7vtg" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.462503 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-l2n7h" podStartSLOduration=3.2948019139999998 podStartE2EDuration="12.462491598s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.362114938 +0000 UTC m=+995.293581285" lastFinishedPulling="2025-09-30 13:51:44.529804652 +0000 UTC m=+1004.461270969" observedRunningTime="2025-09-30 13:51:46.457624232 +0000 UTC m=+1006.389090549" watchObservedRunningTime="2025-09-30 13:51:46.462491598 +0000 UTC m=+1006.393957905" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.464445 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-bd2g9" event={"ID":"3b26411c-5b67-4660-9994-0500516afb9e","Type":"ContainerStarted","Data":"9706659c7393f03ada5de0a2a2a7e0289faf8d40ebc2a1c27e9e6da47777a66e"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.464834 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-bd2g9" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.467309 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-prn5v" event={"ID":"fcc8d2d0-1c61-4f4a-95a4-1ff2f80eb7fa","Type":"ContainerStarted","Data":"d35d21ce5bbe9756855d6c18acd513d81c89334df08f8a76c07d58143673e3eb"} Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.467444 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-prn5v" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.490501 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8qnst" podStartSLOduration=4.366357026 podStartE2EDuration="13.490475144s" podCreationTimestamp="2025-09-30 13:51:33 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.358666848 +0000 UTC m=+995.290133155" lastFinishedPulling="2025-09-30 13:51:44.482784956 +0000 UTC m=+1004.414251273" observedRunningTime="2025-09-30 13:51:46.488506851 +0000 UTC m=+1006.419973158" watchObservedRunningTime="2025-09-30 13:51:46.490475144 +0000 UTC m=+1006.421941451" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.550923 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-lpmr4" podStartSLOduration=3.739244552 podStartE2EDuration="12.55090695s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.72988293 +0000 UTC m=+995.661349237" lastFinishedPulling="2025-09-30 13:51:44.541545318 +0000 UTC m=+1004.473011635" observedRunningTime="2025-09-30 13:51:46.549653 +0000 UTC m=+1006.481119317" watchObservedRunningTime="2025-09-30 13:51:46.55090695 +0000 UTC m=+1006.482373257" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.551935 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" podStartSLOduration=3.770570576 podStartE2EDuration="12.551928963s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.748821537 +0000 UTC m=+995.680287844" lastFinishedPulling="2025-09-30 13:51:44.530179924 +0000 UTC m=+1004.461646231" observedRunningTime="2025-09-30 13:51:46.525783835 +0000 UTC m=+1006.457250142" watchObservedRunningTime="2025-09-30 13:51:46.551928963 +0000 UTC m=+1006.483395270" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.571611 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-5x5rg" podStartSLOduration=4.333157643 podStartE2EDuration="13.571595213s" podCreationTimestamp="2025-09-30 13:51:33 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.245013937 +0000 UTC m=+995.176480244" lastFinishedPulling="2025-09-30 13:51:44.483451487 +0000 UTC m=+1004.414917814" observedRunningTime="2025-09-30 13:51:46.568250435 +0000 UTC m=+1006.499716752" 
watchObservedRunningTime="2025-09-30 13:51:46.571595213 +0000 UTC m=+1006.503061540" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.593773 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-56wct" podStartSLOduration=3.7797857500000003 podStartE2EDuration="12.593758723s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.714476356 +0000 UTC m=+995.645942673" lastFinishedPulling="2025-09-30 13:51:44.528449339 +0000 UTC m=+1004.459915646" observedRunningTime="2025-09-30 13:51:46.591549292 +0000 UTC m=+1006.523015599" watchObservedRunningTime="2025-09-30 13:51:46.593758723 +0000 UTC m=+1006.525225030" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.610211 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8l52m" podStartSLOduration=3.8143785279999998 podStartE2EDuration="12.610196499s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.748483806 +0000 UTC m=+995.679950113" lastFinishedPulling="2025-09-30 13:51:44.544301777 +0000 UTC m=+1004.475768084" observedRunningTime="2025-09-30 13:51:46.608121893 +0000 UTC m=+1006.539588200" watchObservedRunningTime="2025-09-30 13:51:46.610196499 +0000 UTC m=+1006.541662806" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.667364 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-vd9cw" podStartSLOduration=3.858070459 podStartE2EDuration="12.66734567s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.732182994 +0000 UTC m=+995.663649291" lastFinishedPulling="2025-09-30 13:51:44.541458205 +0000 UTC m=+1004.472924502" observedRunningTime="2025-09-30 13:51:46.627829904 +0000 UTC m=+1006.559296221" watchObservedRunningTime="2025-09-30 13:51:46.66734567 +0000 UTC m=+1006.598811977" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.671321 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-t2dkk" podStartSLOduration=3.554161222 podStartE2EDuration="12.671310087s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.440271512 +0000 UTC m=+995.371737819" lastFinishedPulling="2025-09-30 13:51:44.557420367 +0000 UTC m=+1004.488886684" observedRunningTime="2025-09-30 13:51:46.664737467 +0000 UTC m=+1006.596203794" watchObservedRunningTime="2025-09-30 13:51:46.671310087 +0000 UTC m=+1006.602776394" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.696205 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-npt95" podStartSLOduration=3.943734048 podStartE2EDuration="13.696187524s" podCreationTimestamp="2025-09-30 13:51:33 +0000 UTC" firstStartedPulling="2025-09-30 13:51:34.730101792 +0000 UTC m=+994.661568089" lastFinishedPulling="2025-09-30 13:51:44.482555268 +0000 UTC m=+1004.414021565" observedRunningTime="2025-09-30 13:51:46.691624008 +0000 UTC m=+1006.623090335" watchObservedRunningTime="2025-09-30 13:51:46.696187524 +0000 UTC m=+1006.627653831" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.711305 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-ntrpm" podStartSLOduration=4.077208718 podStartE2EDuration="12.711289208s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.907418427 +0000 UTC m=+995.838884734" lastFinishedPulling="2025-09-30 13:51:44.541498907 +0000 UTC m=+1004.472965224" observedRunningTime="2025-09-30 13:51:46.708880591 +0000 UTC m=+1006.640346898" watchObservedRunningTime="2025-09-30 13:51:46.711289208 +0000 UTC m=+1006.642755515" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.760604 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-prn5v" podStartSLOduration=3.6334696920000003 podStartE2EDuration="12.760575246s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.430569001 +0000 UTC m=+995.362035308" lastFinishedPulling="2025-09-30 13:51:44.557674535 +0000 UTC m=+1004.489140862" observedRunningTime="2025-09-30 13:51:46.742417145 +0000 UTC m=+1006.673883582" watchObservedRunningTime="2025-09-30 13:51:46.760575246 +0000 UTC m=+1006.692041553" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.761785 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-mvkjc" podStartSLOduration=3.921919543 podStartE2EDuration="12.761776765s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.738393852 +0000 UTC m=+995.669860159" lastFinishedPulling="2025-09-30 13:51:44.578251074 +0000 UTC m=+1004.509717381" observedRunningTime="2025-09-30 13:51:46.757073495 +0000 UTC m=+1006.688539802" watchObservedRunningTime="2025-09-30 13:51:46.761776765 +0000 UTC m=+1006.693243082" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.774682 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-bd2g9" podStartSLOduration=4.397273396 podStartE2EDuration="13.774648367s" podCreationTimestamp="2025-09-30 13:51:33 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.105290571 +0000 UTC m=+995.036756878" lastFinishedPulling="2025-09-30 13:51:44.482665502 +0000 UTC m=+1004.414131849" observedRunningTime="2025-09-30 13:51:46.772334313 +0000 UTC m=+1006.703800620" watchObservedRunningTime="2025-09-30 13:51:46.774648367 +0000 UTC m=+1006.706114664" Sep 30 13:51:46 crc kubenswrapper[4783]: I0930 13:51:46.794176 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-d7vtg" podStartSLOduration=4.674035413 podStartE2EDuration="13.794151812s" podCreationTimestamp="2025-09-30 13:51:33 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.36246468 +0000 UTC m=+995.293930987" lastFinishedPulling="2025-09-30 13:51:44.482581079 +0000 UTC m=+1004.414047386" observedRunningTime="2025-09-30 13:51:46.791860039 +0000 UTC m=+1006.723326366" watchObservedRunningTime="2025-09-30 13:51:46.794151812 +0000 UTC m=+1006.725618129" Sep 30 13:51:47 crc kubenswrapper[4783]: I0930 13:51:47.478456 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" Sep 30 13:51:54 crc kubenswrapper[4783]: I0930 13:51:54.280705 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-bd2g9" Sep 30 13:51:54 crc kubenswrapper[4783]: I0930 13:51:54.291066 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-npt95" Sep 30 13:51:54 crc kubenswrapper[4783]: I0930 13:51:54.320258 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-5x5rg" Sep 30 13:51:54 crc kubenswrapper[4783]: I0930 13:51:54.342687 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-d7vtg" Sep 30 13:51:54 crc kubenswrapper[4783]: I0930 13:51:54.361033 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-8qnst" Sep 30 13:51:54 crc kubenswrapper[4783]: I0930 13:51:54.414952 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-l2n7h" Sep 30 13:51:54 crc kubenswrapper[4783]: I0930 13:51:54.478996 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6f589bc7f7-t2dkk" Sep 30 13:51:54 crc kubenswrapper[4783]: I0930 13:51:54.535743 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-prn5v" Sep 30 13:51:54 crc kubenswrapper[4783]: I0930 13:51:54.668287 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-657c6b68c7-56wct" Sep 30 13:51:54 crc kubenswrapper[4783]: I0930 13:51:54.707920 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-mvkjc" Sep 30 13:51:54 crc kubenswrapper[4783]: I0930 13:51:54.725854 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-vd9cw" Sep 30 13:51:54 crc kubenswrapper[4783]: I0930 13:51:54.755195 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-6b96467f46-8l52m" Sep 30 13:51:54 crc kubenswrapper[4783]: I0930 13:51:54.757388 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-7d9c7d9477-z27gn" Sep 30 13:51:54 crc kubenswrapper[4783]: I0930 13:51:54.787539 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-79f9fc9fd8-lpmr4" Sep 30 13:51:55 crc kubenswrapper[4783]: I0930 13:51:55.024985 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-75756dd4d9-ntrpm" Sep 30 13:51:57 crc kubenswrapper[4783]: I0930 13:51:57.563997 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-mms9t" event={"ID":"987e3b87-31ee-4a44-8829-61e239d3945a","Type":"ContainerStarted","Data":"9935edc21a4901a05dae3ef1321f611d67dc4e03138719ddcfab21be0ec8bfa2"} Sep 30 13:51:57 crc kubenswrapper[4783]: I0930 13:51:57.565161 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx" event={"ID":"f2e6ebfe-0822-4070-b60c-64974705fd4e","Type":"ContainerStarted","Data":"3f589039b5ff10dc72735fa5ef64afed78c41286140da83d0730d7b547e8a442"} Sep 30 13:51:57 crc kubenswrapper[4783]: I0930 13:51:57.565600 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx" Sep 30 13:51:57 crc kubenswrapper[4783]: I0930 13:51:57.567772 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4" event={"ID":"719cc658-fa4f-4ae9-878a-47e8fdc9c2cc","Type":"ContainerStarted","Data":"48662f16c229e67bee0c9849b49c68808a24098c45fd9da949af498748052bdb"} Sep 30 13:51:57 crc kubenswrapper[4783]: I0930 13:51:57.568194 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4" Sep 30 13:51:57 crc kubenswrapper[4783]: I0930 13:51:57.570138 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk" event={"ID":"137c2cf3-bda8-4da6-a9c1-d430e636c745","Type":"ContainerStarted","Data":"73e202867d8ac583063bc4324de9c2fa031ccf5a62e39a37e9edad9ddab6d034"} Sep 30 13:51:57 crc kubenswrapper[4783]: I0930 13:51:57.570593 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk" Sep 30 13:51:57 crc kubenswrapper[4783]: I0930 13:51:57.572415 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s" event={"ID":"c9f201b9-c0fe-4a29-bcad-6ae49742b3ff","Type":"ContainerStarted","Data":"f40aa32eac9f5c4c0b79fcb7076ca7178cf46278049e4a8c7096dd59bab30440"} Sep 30 13:51:57 crc kubenswrapper[4783]: I0930 13:51:57.572787 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s" Sep 30 13:51:57 crc kubenswrapper[4783]: I0930 13:51:57.586757 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-mms9t" podStartSLOduration=4.4960012240000005 podStartE2EDuration="23.58673883s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.913855224 +0000 UTC m=+995.845321531" lastFinishedPulling="2025-09-30 13:51:55.00459283 +0000 UTC m=+1014.936059137" observedRunningTime="2025-09-30 13:51:57.580577373 +0000 UTC m=+1017.512043670" watchObservedRunningTime="2025-09-30 13:51:57.58673883 +0000 UTC m=+1017.518205137" Sep 30 13:51:57 crc kubenswrapper[4783]: I0930 13:51:57.607074 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s" podStartSLOduration=2.901278937 podStartE2EDuration="23.607054471s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.752056981 +0000 UTC m=+995.683523288" lastFinishedPulling="2025-09-30 13:51:56.457832505 +0000 UTC m=+1016.389298822" observedRunningTime="2025-09-30 13:51:57.603677913 +0000 UTC m=+1017.535144220" watchObservedRunningTime="2025-09-30 13:51:57.607054471 +0000 UTC m=+1017.538520778" Sep 30 13:51:57 crc kubenswrapper[4783]: I0930 13:51:57.621283 4783 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4" podStartSLOduration=2.955600776 podStartE2EDuration="23.621267867s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.81980592 +0000 UTC m=+995.751272227" lastFinishedPulling="2025-09-30 13:51:56.485473001 +0000 UTC m=+1016.416939318" observedRunningTime="2025-09-30 13:51:57.617387182 +0000 UTC m=+1017.548853479" watchObservedRunningTime="2025-09-30 13:51:57.621267867 +0000 UTC m=+1017.552734164" Sep 30 13:51:57 crc kubenswrapper[4783]: I0930 13:51:57.639055 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk" podStartSLOduration=3.102090899 podStartE2EDuration="23.639037256s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.875746912 +0000 UTC m=+995.807213219" lastFinishedPulling="2025-09-30 13:51:56.412693259 +0000 UTC m=+1016.344159576" observedRunningTime="2025-09-30 13:51:57.633725895 +0000 UTC m=+1017.565192222" watchObservedRunningTime="2025-09-30 13:51:57.639037256 +0000 UTC m=+1017.570503573" Sep 30 13:51:57 crc kubenswrapper[4783]: I0930 13:51:57.653332 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx" podStartSLOduration=3.061469558 podStartE2EDuration="23.653265081s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.866194097 +0000 UTC m=+995.797660404" lastFinishedPulling="2025-09-30 13:51:56.45798961 +0000 UTC m=+1016.389455927" observedRunningTime="2025-09-30 13:51:57.647800756 +0000 UTC m=+1017.579267063" watchObservedRunningTime="2025-09-30 13:51:57.653265081 +0000 UTC m=+1017.584731408" Sep 30 13:51:58 crc kubenswrapper[4783]: I0930 13:51:58.580906 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" event={"ID":"ade21fb8-be1e-4bdd-a8a7-16c6d2124570","Type":"ContainerStarted","Data":"47649a8d5614f31f83b7392ad2022ee85477f66cd68928387db8635533aac5db"} Sep 30 13:51:58 crc kubenswrapper[4783]: I0930 13:51:58.581685 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" Sep 30 13:51:58 crc kubenswrapper[4783]: I0930 13:51:58.584033 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh" event={"ID":"8c02b85f-2ca3-48a6-86ad-01ce12288522","Type":"ContainerStarted","Data":"5cfb2f4b66d71d7425cb7b3f6dd126b73a4c5fb6f89cb455c55d6dd44cdfff72"} Sep 30 13:51:58 crc kubenswrapper[4783]: I0930 13:51:58.614546 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" podStartSLOduration=2.60391639 podStartE2EDuration="24.614516696s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.849359948 +0000 UTC m=+995.780826255" lastFinishedPulling="2025-09-30 13:51:57.859960224 +0000 UTC m=+1017.791426561" observedRunningTime="2025-09-30 13:51:58.611415657 +0000 UTC m=+1018.542881984" watchObservedRunningTime="2025-09-30 13:51:58.614516696 +0000 UTC m=+1018.545983043" Sep 30 13:51:58 crc kubenswrapper[4783]: I0930 13:51:58.632622 4783 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh" podStartSLOduration=2.023179966 podStartE2EDuration="24.632604276s" podCreationTimestamp="2025-09-30 13:51:34 +0000 UTC" firstStartedPulling="2025-09-30 13:51:35.754906362 +0000 UTC m=+995.686372669" lastFinishedPulling="2025-09-30 13:51:58.364330672 +0000 UTC m=+1018.295796979" observedRunningTime="2025-09-30 13:51:58.631023535 +0000 UTC m=+1018.562489892" watchObservedRunningTime="2025-09-30 13:51:58.632604276 +0000 UTC m=+1018.564070583" Sep 30 13:52:04 crc kubenswrapper[4783]: I0930 13:52:04.800451 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-6fb7d6b8bf-h9b4s" Sep 30 13:52:04 crc kubenswrapper[4783]: I0930 13:52:04.834107 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh" Sep 30 13:52:04 crc kubenswrapper[4783]: I0930 13:52:04.841600 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-59mrh" Sep 30 13:52:04 crc kubenswrapper[4783]: I0930 13:52:04.960446 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-q6vmx" Sep 30 13:52:04 crc kubenswrapper[4783]: I0930 13:52:04.997597 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-ml9lk" Sep 30 13:52:05 crc kubenswrapper[4783]: I0930 13:52:05.015145 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-6bb97fcf96-nk8b4" Sep 30 13:52:05 crc kubenswrapper[4783]: I0930 13:52:05.419909 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq" Sep 30 13:52:07 crc kubenswrapper[4783]: I0930 13:52:07.674751 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:52:07 crc kubenswrapper[4783]: I0930 13:52:07.675132 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:52:07 crc kubenswrapper[4783]: I0930 13:52:07.675185 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:52:07 crc kubenswrapper[4783]: I0930 13:52:07.676027 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6af773030061195dc9f5fe7c2469df68133a2624856a154eaac950572277b0cc"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 13:52:07 crc kubenswrapper[4783]: I0930 13:52:07.676103 4783 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://6af773030061195dc9f5fe7c2469df68133a2624856a154eaac950572277b0cc" gracePeriod=600 Sep 30 13:52:08 crc kubenswrapper[4783]: I0930 13:52:08.688128 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="6af773030061195dc9f5fe7c2469df68133a2624856a154eaac950572277b0cc" exitCode=0 Sep 30 13:52:08 crc kubenswrapper[4783]: I0930 13:52:08.688236 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"6af773030061195dc9f5fe7c2469df68133a2624856a154eaac950572277b0cc"} Sep 30 13:52:08 crc kubenswrapper[4783]: I0930 13:52:08.688568 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"a7cb9b97e41dd6ebeb85ae9ef9261c774b500007acfc3bb801dd53ce003a26e8"} Sep 30 13:52:08 crc kubenswrapper[4783]: I0930 13:52:08.688599 4783 scope.go:117] "RemoveContainer" containerID="f5f4180b203e0bfb30a34850cbda7cc99e36cdce653441931fda65fe6cf8a779" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.397905 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8b69cf79-cg7gl"] Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.400014 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8b69cf79-cg7gl" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.404136 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.404348 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-5s47q" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.404521 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.405847 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8b69cf79-cg7gl"] Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.409934 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.415141 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/448163b8-6add-4db4-bfdf-071fc52606cd-config\") pod \"dnsmasq-dns-b8b69cf79-cg7gl\" (UID: \"448163b8-6add-4db4-bfdf-071fc52606cd\") " pod="openstack/dnsmasq-dns-b8b69cf79-cg7gl" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.415202 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kqdj\" (UniqueName: \"kubernetes.io/projected/448163b8-6add-4db4-bfdf-071fc52606cd-kube-api-access-2kqdj\") pod \"dnsmasq-dns-b8b69cf79-cg7gl\" (UID: \"448163b8-6add-4db4-bfdf-071fc52606cd\") " pod="openstack/dnsmasq-dns-b8b69cf79-cg7gl" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.495465 4783 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-d5f6f49c7-wt75z"] Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.496969 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.498903 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.507104 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d5f6f49c7-wt75z"] Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.517002 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/448163b8-6add-4db4-bfdf-071fc52606cd-config\") pod \"dnsmasq-dns-b8b69cf79-cg7gl\" (UID: \"448163b8-6add-4db4-bfdf-071fc52606cd\") " pod="openstack/dnsmasq-dns-b8b69cf79-cg7gl" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.517073 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kqdj\" (UniqueName: \"kubernetes.io/projected/448163b8-6add-4db4-bfdf-071fc52606cd-kube-api-access-2kqdj\") pod \"dnsmasq-dns-b8b69cf79-cg7gl\" (UID: \"448163b8-6add-4db4-bfdf-071fc52606cd\") " pod="openstack/dnsmasq-dns-b8b69cf79-cg7gl" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.517105 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-dns-svc\") pod \"dnsmasq-dns-d5f6f49c7-wt75z\" (UID: \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\") " pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.517141 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-config\") pod \"dnsmasq-dns-d5f6f49c7-wt75z\" (UID: \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\") " pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.517173 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fm4b\" (UniqueName: \"kubernetes.io/projected/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-kube-api-access-2fm4b\") pod \"dnsmasq-dns-d5f6f49c7-wt75z\" (UID: \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\") " pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.518065 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/448163b8-6add-4db4-bfdf-071fc52606cd-config\") pod \"dnsmasq-dns-b8b69cf79-cg7gl\" (UID: \"448163b8-6add-4db4-bfdf-071fc52606cd\") " pod="openstack/dnsmasq-dns-b8b69cf79-cg7gl" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.546887 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kqdj\" (UniqueName: \"kubernetes.io/projected/448163b8-6add-4db4-bfdf-071fc52606cd-kube-api-access-2kqdj\") pod \"dnsmasq-dns-b8b69cf79-cg7gl\" (UID: \"448163b8-6add-4db4-bfdf-071fc52606cd\") " pod="openstack/dnsmasq-dns-b8b69cf79-cg7gl" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.617928 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-dns-svc\") pod 
\"dnsmasq-dns-d5f6f49c7-wt75z\" (UID: \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\") " pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.618011 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-config\") pod \"dnsmasq-dns-d5f6f49c7-wt75z\" (UID: \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\") " pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.618046 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fm4b\" (UniqueName: \"kubernetes.io/projected/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-kube-api-access-2fm4b\") pod \"dnsmasq-dns-d5f6f49c7-wt75z\" (UID: \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\") " pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.618850 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-dns-svc\") pod \"dnsmasq-dns-d5f6f49c7-wt75z\" (UID: \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\") " pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.618926 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-config\") pod \"dnsmasq-dns-d5f6f49c7-wt75z\" (UID: \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\") " pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.634856 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fm4b\" (UniqueName: \"kubernetes.io/projected/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-kube-api-access-2fm4b\") pod \"dnsmasq-dns-d5f6f49c7-wt75z\" (UID: \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\") " pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.718889 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8b69cf79-cg7gl" Sep 30 13:52:22 crc kubenswrapper[4783]: I0930 13:52:22.810611 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.171819 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8b69cf79-cg7gl"] Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.235450 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d5f6f49c7-wt75z"] Sep 30 13:52:23 crc kubenswrapper[4783]: W0930 13:52:23.242021 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ab08fb7_6b95_4f2c_acb0_15c92172ca40.slice/crio-8dd37e2165aa7d9c2afb7b91286343f90e07a2f26e016e690300d5ab8541e083 WatchSource:0}: Error finding container 8dd37e2165aa7d9c2afb7b91286343f90e07a2f26e016e690300d5ab8541e083: Status 404 returned error can't find the container with id 8dd37e2165aa7d9c2afb7b91286343f90e07a2f26e016e690300d5ab8541e083 Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.411810 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8b69cf79-cg7gl"] Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.427471 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7869668685-5bt7n"] Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.428840 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7869668685-5bt7n" Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.438655 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7869668685-5bt7n"] Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.532820 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6707a42-ffcc-431c-9631-2e98348946a5-dns-svc\") pod \"dnsmasq-dns-7869668685-5bt7n\" (UID: \"a6707a42-ffcc-431c-9631-2e98348946a5\") " pod="openstack/dnsmasq-dns-7869668685-5bt7n" Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.532886 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6mx5\" (UniqueName: \"kubernetes.io/projected/a6707a42-ffcc-431c-9631-2e98348946a5-kube-api-access-t6mx5\") pod \"dnsmasq-dns-7869668685-5bt7n\" (UID: \"a6707a42-ffcc-431c-9631-2e98348946a5\") " pod="openstack/dnsmasq-dns-7869668685-5bt7n" Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.533060 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6707a42-ffcc-431c-9631-2e98348946a5-config\") pod \"dnsmasq-dns-7869668685-5bt7n\" (UID: \"a6707a42-ffcc-431c-9631-2e98348946a5\") " pod="openstack/dnsmasq-dns-7869668685-5bt7n" Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.633921 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6707a42-ffcc-431c-9631-2e98348946a5-dns-svc\") pod \"dnsmasq-dns-7869668685-5bt7n\" (UID: \"a6707a42-ffcc-431c-9631-2e98348946a5\") " pod="openstack/dnsmasq-dns-7869668685-5bt7n" Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.633984 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6mx5\" (UniqueName: \"kubernetes.io/projected/a6707a42-ffcc-431c-9631-2e98348946a5-kube-api-access-t6mx5\") pod \"dnsmasq-dns-7869668685-5bt7n\" (UID: \"a6707a42-ffcc-431c-9631-2e98348946a5\") " 
pod="openstack/dnsmasq-dns-7869668685-5bt7n" Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.634034 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6707a42-ffcc-431c-9631-2e98348946a5-config\") pod \"dnsmasq-dns-7869668685-5bt7n\" (UID: \"a6707a42-ffcc-431c-9631-2e98348946a5\") " pod="openstack/dnsmasq-dns-7869668685-5bt7n" Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.634903 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6707a42-ffcc-431c-9631-2e98348946a5-config\") pod \"dnsmasq-dns-7869668685-5bt7n\" (UID: \"a6707a42-ffcc-431c-9631-2e98348946a5\") " pod="openstack/dnsmasq-dns-7869668685-5bt7n" Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.634993 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6707a42-ffcc-431c-9631-2e98348946a5-dns-svc\") pod \"dnsmasq-dns-7869668685-5bt7n\" (UID: \"a6707a42-ffcc-431c-9631-2e98348946a5\") " pod="openstack/dnsmasq-dns-7869668685-5bt7n" Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.653551 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6mx5\" (UniqueName: \"kubernetes.io/projected/a6707a42-ffcc-431c-9631-2e98348946a5-kube-api-access-t6mx5\") pod \"dnsmasq-dns-7869668685-5bt7n\" (UID: \"a6707a42-ffcc-431c-9631-2e98348946a5\") " pod="openstack/dnsmasq-dns-7869668685-5bt7n" Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.754584 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7869668685-5bt7n" Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.815308 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" event={"ID":"0ab08fb7-6b95-4f2c-acb0-15c92172ca40","Type":"ContainerStarted","Data":"8dd37e2165aa7d9c2afb7b91286343f90e07a2f26e016e690300d5ab8541e083"} Sep 30 13:52:23 crc kubenswrapper[4783]: I0930 13:52:23.816147 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8b69cf79-cg7gl" event={"ID":"448163b8-6add-4db4-bfdf-071fc52606cd","Type":"ContainerStarted","Data":"856cf7f6a6b437c408002dc9846da58fbf08f05c9e72082b9c076e9d1ae5c162"} Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.178324 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7869668685-5bt7n"] Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.326213 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d5f6f49c7-wt75z"] Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.351063 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77795d58f5-rbpq5"] Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.352208 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.364556 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77795d58f5-rbpq5"] Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.445699 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2trm\" (UniqueName: \"kubernetes.io/projected/f4d64991-7de8-4788-afe9-d95c7b72ddd4-kube-api-access-j2trm\") pod \"dnsmasq-dns-77795d58f5-rbpq5\" (UID: \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\") " pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.445772 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4d64991-7de8-4788-afe9-d95c7b72ddd4-config\") pod \"dnsmasq-dns-77795d58f5-rbpq5\" (UID: \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\") " pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.445822 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4d64991-7de8-4788-afe9-d95c7b72ddd4-dns-svc\") pod \"dnsmasq-dns-77795d58f5-rbpq5\" (UID: \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\") " pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.550279 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2trm\" (UniqueName: \"kubernetes.io/projected/f4d64991-7de8-4788-afe9-d95c7b72ddd4-kube-api-access-j2trm\") pod \"dnsmasq-dns-77795d58f5-rbpq5\" (UID: \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\") " pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.550353 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4d64991-7de8-4788-afe9-d95c7b72ddd4-config\") pod \"dnsmasq-dns-77795d58f5-rbpq5\" (UID: \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\") " pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.550420 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4d64991-7de8-4788-afe9-d95c7b72ddd4-dns-svc\") pod \"dnsmasq-dns-77795d58f5-rbpq5\" (UID: \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\") " pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.551468 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4d64991-7de8-4788-afe9-d95c7b72ddd4-dns-svc\") pod \"dnsmasq-dns-77795d58f5-rbpq5\" (UID: \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\") " pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.552426 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4d64991-7de8-4788-afe9-d95c7b72ddd4-config\") pod \"dnsmasq-dns-77795d58f5-rbpq5\" (UID: \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\") " pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.578800 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.580154 4783 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.584346 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.584415 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dq4kr" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.584733 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.584781 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.584847 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.585754 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.585777 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.586163 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2trm\" (UniqueName: \"kubernetes.io/projected/f4d64991-7de8-4788-afe9-d95c7b72ddd4-kube-api-access-j2trm\") pod \"dnsmasq-dns-77795d58f5-rbpq5\" (UID: \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\") " pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.591948 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.675620 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.753784 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.754066 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b901a1db-0fb0-4d58-be99-fdfd812683e6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.754151 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b901a1db-0fb0-4d58-be99-fdfd812683e6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.754296 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.754366 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.754388 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq4ll\" (UniqueName: \"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-kube-api-access-cq4ll\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.754520 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.754554 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.754643 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.754670 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.754691 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.828578 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7869668685-5bt7n" event={"ID":"a6707a42-ffcc-431c-9631-2e98348946a5","Type":"ContainerStarted","Data":"cae58a3f2050f758d59edfa2cc93e4ef15e36b06d32b994b638ce30db8e432a1"} Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.855374 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.855411 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.855427 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.855468 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.855500 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b901a1db-0fb0-4d58-be99-fdfd812683e6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.855516 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b901a1db-0fb0-4d58-be99-fdfd812683e6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.855534 4783 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.855559 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.855577 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq4ll\" (UniqueName: \"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-kube-api-access-cq4ll\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.855600 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.855617 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.855990 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.856254 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.857465 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.858404 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.858776 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.859020 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.862405 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b901a1db-0fb0-4d58-be99-fdfd812683e6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.868517 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.870764 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.884695 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b901a1db-0fb0-4d58-be99-fdfd812683e6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.886487 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.890597 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq4ll\" (UniqueName: \"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-kube-api-access-cq4ll\") pod \"rabbitmq-cell1-server-0\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:24 crc kubenswrapper[4783]: I0930 13:52:24.916755 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.248431 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77795d58f5-rbpq5"] Sep 30 13:52:25 crc kubenswrapper[4783]: W0930 13:52:25.265603 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4d64991_7de8_4788_afe9_d95c7b72ddd4.slice/crio-343da532930a0b28b92ff2e4e66aa3d145e373cc0c7c615221661ff469f040c0 WatchSource:0}: Error finding container 343da532930a0b28b92ff2e4e66aa3d145e373cc0c7c615221661ff469f040c0: Status 404 returned error can't find the container with id 343da532930a0b28b92ff2e4e66aa3d145e373cc0c7c615221661ff469f040c0 Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.467707 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.470755 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.472359 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.474666 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-d6lgh" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.478178 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.478314 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.478396 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.478922 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.479043 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.488694 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.501016 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 13:52:25 crc kubenswrapper[4783]: W0930 13:52:25.515521 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb901a1db_0fb0_4d58_be99_fdfd812683e6.slice/crio-ef4a96baff801791ce5d6008155172f208e1d40f394f83e020f4fde86d71417c WatchSource:0}: Error finding container ef4a96baff801791ce5d6008155172f208e1d40f394f83e020f4fde86d71417c: Status 404 returned error can't find the container with id ef4a96baff801791ce5d6008155172f208e1d40f394f83e020f4fde86d71417c Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.566502 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/164c5743-32f5-4347-9c9d-20d28f1f2dce-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc 
kubenswrapper[4783]: I0930 13:52:25.566580 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/164c5743-32f5-4347-9c9d-20d28f1f2dce-pod-info\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.566610 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.566631 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-server-conf\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.566650 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.566667 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.566681 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.566707 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.566726 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckr6l\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-kube-api-access-ckr6l\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.566755 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.566781 4783 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.667946 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/164c5743-32f5-4347-9c9d-20d28f1f2dce-pod-info\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.667986 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.668011 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-server-conf\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.668029 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.668044 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.668058 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.668083 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.668101 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckr6l\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-kube-api-access-ckr6l\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.668129 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " 
pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.668146 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.668189 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/164c5743-32f5-4347-9c9d-20d28f1f2dce-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.668745 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.669243 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.669488 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-server-conf\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.669918 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.670343 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.670536 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.682449 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.682937 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/164c5743-32f5-4347-9c9d-20d28f1f2dce-pod-info\") 
pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.685117 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.707086 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/164c5743-32f5-4347-9c9d-20d28f1f2dce-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.711352 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckr6l\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-kube-api-access-ckr6l\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.720127 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.801753 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.856031 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b901a1db-0fb0-4d58-be99-fdfd812683e6","Type":"ContainerStarted","Data":"ef4a96baff801791ce5d6008155172f208e1d40f394f83e020f4fde86d71417c"} Sep 30 13:52:25 crc kubenswrapper[4783]: I0930 13:52:25.868390 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" event={"ID":"f4d64991-7de8-4788-afe9-d95c7b72ddd4","Type":"ContainerStarted","Data":"343da532930a0b28b92ff2e4e66aa3d145e373cc0c7c615221661ff469f040c0"} Sep 30 13:52:26 crc kubenswrapper[4783]: I0930 13:52:26.297815 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 13:52:26 crc kubenswrapper[4783]: W0930 13:52:26.306507 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod164c5743_32f5_4347_9c9d_20d28f1f2dce.slice/crio-db571d3067270dbf8cbe55cc5436507182e78f1ae3928b3b060ffdd16a212abf WatchSource:0}: Error finding container db571d3067270dbf8cbe55cc5436507182e78f1ae3928b3b060ffdd16a212abf: Status 404 returned error can't find the container with id db571d3067270dbf8cbe55cc5436507182e78f1ae3928b3b060ffdd16a212abf Sep 30 13:52:26 crc kubenswrapper[4783]: I0930 13:52:26.878113 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"164c5743-32f5-4347-9c9d-20d28f1f2dce","Type":"ContainerStarted","Data":"db571d3067270dbf8cbe55cc5436507182e78f1ae3928b3b060ffdd16a212abf"} Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.158141 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.160088 4783 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.162583 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.163654 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-gq8rj" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.163820 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.163975 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.164082 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.165174 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.168029 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.302948 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.303457 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-kolla-config\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.303537 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89n9f\" (UniqueName: \"kubernetes.io/projected/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-kube-api-access-89n9f\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.303605 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.303663 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.303700 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") 
" pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.303735 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-config-data-default\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.303751 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.303772 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-secrets\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.404508 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.404571 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-config-data-default\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.404591 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.404611 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-secrets\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.404634 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.404689 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-kolla-config\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.404714 4783 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-89n9f\" (UniqueName: \"kubernetes.io/projected/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-kube-api-access-89n9f\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.404740 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.404759 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.405105 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.405661 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.406556 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-config-data-default\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.406561 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-kolla-config\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.407784 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.416889 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.421344 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 
13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.422205 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89n9f\" (UniqueName: \"kubernetes.io/projected/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-kube-api-access-89n9f\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.426837 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-secrets\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.431345 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") " pod="openstack/openstack-galera-0" Sep 30 13:52:27 crc kubenswrapper[4783]: I0930 13:52:27.482147 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.036601 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.038426 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.042487 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-kv6b7" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.042743 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.042897 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.043209 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.052404 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.118124 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwj2k\" (UniqueName: \"kubernetes.io/projected/f1989fc2-d0ba-49ce-a488-589eaaaecb58-kube-api-access-xwj2k\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.118260 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.118292 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: 
\"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.118314 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.118347 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.118363 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.118485 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f1989fc2-d0ba-49ce-a488-589eaaaecb58-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.118513 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.118540 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.220147 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwj2k\" (UniqueName: \"kubernetes.io/projected/f1989fc2-d0ba-49ce-a488-589eaaaecb58-kube-api-access-xwj2k\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.220216 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.220247 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: 
\"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.220265 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.220299 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.220315 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.220349 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f1989fc2-d0ba-49ce-a488-589eaaaecb58-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.220376 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.220405 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.220797 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.221071 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f1989fc2-d0ba-49ce-a488-589eaaaecb58-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.221517 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc 
kubenswrapper[4783]: I0930 13:52:28.221819 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.221833 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.236767 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.237298 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.243560 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwj2k\" (UniqueName: \"kubernetes.io/projected/f1989fc2-d0ba-49ce-a488-589eaaaecb58-kube-api-access-xwj2k\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.244626 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.258766 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") " pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.362257 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.512528 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.513866 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.516664 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.517235 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-8kfqt" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.520610 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.523326 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.625339 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd739820-88fe-4dc4-9ff6-1dcbee461751-config-data\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.625753 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd739820-88fe-4dc4-9ff6-1dcbee461751-memcached-tls-certs\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.625789 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pb5kd\" (UniqueName: \"kubernetes.io/projected/fd739820-88fe-4dc4-9ff6-1dcbee461751-kube-api-access-pb5kd\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.625809 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fd739820-88fe-4dc4-9ff6-1dcbee461751-kolla-config\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.626077 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd739820-88fe-4dc4-9ff6-1dcbee461751-combined-ca-bundle\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.727815 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd739820-88fe-4dc4-9ff6-1dcbee461751-combined-ca-bundle\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.727869 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd739820-88fe-4dc4-9ff6-1dcbee461751-config-data\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.727922 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/fd739820-88fe-4dc4-9ff6-1dcbee461751-memcached-tls-certs\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.727952 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pb5kd\" (UniqueName: \"kubernetes.io/projected/fd739820-88fe-4dc4-9ff6-1dcbee461751-kube-api-access-pb5kd\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.727972 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fd739820-88fe-4dc4-9ff6-1dcbee461751-kolla-config\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.728860 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fd739820-88fe-4dc4-9ff6-1dcbee461751-kolla-config\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.729118 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd739820-88fe-4dc4-9ff6-1dcbee461751-config-data\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.741700 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd739820-88fe-4dc4-9ff6-1dcbee461751-memcached-tls-certs\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.748255 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd739820-88fe-4dc4-9ff6-1dcbee461751-combined-ca-bundle\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.752944 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pb5kd\" (UniqueName: \"kubernetes.io/projected/fd739820-88fe-4dc4-9ff6-1dcbee461751-kube-api-access-pb5kd\") pod \"memcached-0\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " pod="openstack/memcached-0" Sep 30 13:52:28 crc kubenswrapper[4783]: I0930 13:52:28.854988 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 13:52:30 crc kubenswrapper[4783]: I0930 13:52:30.119729 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 13:52:30 crc kubenswrapper[4783]: I0930 13:52:30.120865 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 13:52:30 crc kubenswrapper[4783]: I0930 13:52:30.125463 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-zdmx6" Sep 30 13:52:30 crc kubenswrapper[4783]: I0930 13:52:30.144693 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 13:52:30 crc kubenswrapper[4783]: I0930 13:52:30.253641 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grks2\" (UniqueName: \"kubernetes.io/projected/7732b547-1797-4164-ad03-6c76c2c1f207-kube-api-access-grks2\") pod \"kube-state-metrics-0\" (UID: \"7732b547-1797-4164-ad03-6c76c2c1f207\") " pod="openstack/kube-state-metrics-0" Sep 30 13:52:30 crc kubenswrapper[4783]: I0930 13:52:30.355101 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grks2\" (UniqueName: \"kubernetes.io/projected/7732b547-1797-4164-ad03-6c76c2c1f207-kube-api-access-grks2\") pod \"kube-state-metrics-0\" (UID: \"7732b547-1797-4164-ad03-6c76c2c1f207\") " pod="openstack/kube-state-metrics-0" Sep 30 13:52:30 crc kubenswrapper[4783]: I0930 13:52:30.378840 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grks2\" (UniqueName: \"kubernetes.io/projected/7732b547-1797-4164-ad03-6c76c2c1f207-kube-api-access-grks2\") pod \"kube-state-metrics-0\" (UID: \"7732b547-1797-4164-ad03-6c76c2c1f207\") " pod="openstack/kube-state-metrics-0" Sep 30 13:52:30 crc kubenswrapper[4783]: I0930 13:52:30.446594 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 13:52:33 crc kubenswrapper[4783]: I0930 13:52:33.984904 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 13:52:33 crc kubenswrapper[4783]: I0930 13:52:33.987176 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:33 crc kubenswrapper[4783]: I0930 13:52:33.994032 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 30 13:52:33 crc kubenswrapper[4783]: I0930 13:52:33.994273 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Sep 30 13:52:33 crc kubenswrapper[4783]: I0930 13:52:33.994618 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-5wpz7" Sep 30 13:52:33 crc kubenswrapper[4783]: I0930 13:52:33.994688 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 30 13:52:33 crc kubenswrapper[4783]: I0930 13:52:33.994616 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 30 13:52:33 crc kubenswrapper[4783]: I0930 13:52:33.996883 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.065908 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-th6r6"] Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.072461 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.074025 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.077353 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.079293 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-4plwv" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.107984 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-th6r6"] Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.117447 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.117494 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f7ef9466-e9f5-467e-9b43-2b7952e5b479-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.117529 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.117573 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.117610 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f7ef9466-e9f5-467e-9b43-2b7952e5b479-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.117625 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7ef9466-e9f5-467e-9b43-2b7952e5b479-config\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.117642 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc26w\" (UniqueName: \"kubernetes.io/projected/f7ef9466-e9f5-467e-9b43-2b7952e5b479-kube-api-access-vc26w\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.117683 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.124467 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-ttc29"] Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.126469 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.141604 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-ttc29"] Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219340 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-etc-ovs\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219398 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-run-ovn\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219458 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-run\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219492 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7ef9466-e9f5-467e-9b43-2b7952e5b479-config\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219515 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-log\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219539 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc26w\" (UniqueName: \"kubernetes.io/projected/f7ef9466-e9f5-467e-9b43-2b7952e5b479-kube-api-access-vc26w\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219563 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-log-ovn\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219588 4783 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgs4w\" (UniqueName: \"kubernetes.io/projected/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-kube-api-access-wgs4w\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219611 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-scripts\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219642 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219681 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219710 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f7ef9466-e9f5-467e-9b43-2b7952e5b479-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219733 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219758 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-ovn-controller-tls-certs\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219776 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219793 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-run\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219811 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/61f71f56-b66e-46a2-a0c5-25d0477db0a2-scripts\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219833 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f7ef9466-e9f5-467e-9b43-2b7952e5b479-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219849 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5t44\" (UniqueName: \"kubernetes.io/projected/61f71f56-b66e-46a2-a0c5-25d0477db0a2-kube-api-access-l5t44\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219866 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-lib\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.219882 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-combined-ca-bundle\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.220740 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7ef9466-e9f5-467e-9b43-2b7952e5b479-config\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.221612 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f7ef9466-e9f5-467e-9b43-2b7952e5b479-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.221693 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.222812 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f7ef9466-e9f5-467e-9b43-2b7952e5b479-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.231072 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc 
kubenswrapper[4783]: I0930 13:52:34.249592 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.250289 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.265071 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.265595 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc26w\" (UniqueName: \"kubernetes.io/projected/f7ef9466-e9f5-467e-9b43-2b7952e5b479-kube-api-access-vc26w\") pod \"ovsdbserver-nb-0\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.319574 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.320619 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-ovn-controller-tls-certs\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.320649 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-run\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.320669 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61f71f56-b66e-46a2-a0c5-25d0477db0a2-scripts\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.321127 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-run\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.321256 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5t44\" (UniqueName: \"kubernetes.io/projected/61f71f56-b66e-46a2-a0c5-25d0477db0a2-kube-api-access-l5t44\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.321278 4783 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-lib\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.321592 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-combined-ca-bundle\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.321743 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-lib\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.321798 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-etc-ovs\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.322127 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-run-ovn\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.322191 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-run\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.322208 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-log\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.322242 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-log-ovn\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.322260 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgs4w\" (UniqueName: \"kubernetes.io/projected/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-kube-api-access-wgs4w\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.322276 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-scripts\") pod \"ovn-controller-th6r6\" (UID: 
\"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.322343 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-run\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.322477 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-log-ovn\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.322576 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-run-ovn\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.322678 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-etc-ovs\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.322752 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-log\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.323207 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61f71f56-b66e-46a2-a0c5-25d0477db0a2-scripts\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.324524 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-scripts\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.325152 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-ovn-controller-tls-certs\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.326237 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-combined-ca-bundle\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.343666 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5t44\" (UniqueName: 
\"kubernetes.io/projected/61f71f56-b66e-46a2-a0c5-25d0477db0a2-kube-api-access-l5t44\") pod \"ovn-controller-ovs-ttc29\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.344450 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgs4w\" (UniqueName: \"kubernetes.io/projected/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-kube-api-access-wgs4w\") pod \"ovn-controller-th6r6\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.388545 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-th6r6" Sep 30 13:52:34 crc kubenswrapper[4783]: I0930 13:52:34.447185 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.116031 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.118781 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.122095 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.122179 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-bv49d" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.122632 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.122719 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.133486 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.189429 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.189502 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.189589 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.189787 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-config\") pod \"ovsdbserver-sb-0\" (UID: 
\"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.189855 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.189959 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw46f\" (UniqueName: \"kubernetes.io/projected/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-kube-api-access-gw46f\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.190169 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.190248 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.291799 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.291844 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.291869 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.291923 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-config\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.291973 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.292013 4783 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-gw46f\" (UniqueName: \"kubernetes.io/projected/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-kube-api-access-gw46f\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.292059 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.292075 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.292105 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.292444 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.292706 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-config\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.293466 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.298872 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.306811 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.308969 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 
13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.314316 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw46f\" (UniqueName: \"kubernetes.io/projected/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-kube-api-access-gw46f\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.314518 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:38 crc kubenswrapper[4783]: I0930 13:52:38.451324 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:40 crc kubenswrapper[4783]: E0930 13:52:40.372673 4783 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b" Sep 30 13:52:40 crc kubenswrapper[4783]: E0930 13:52:40.373145 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2kqdj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-b8b69cf79-cg7gl_openstack(448163b8-6add-4db4-bfdf-071fc52606cd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 13:52:40 crc kubenswrapper[4783]: 
E0930 13:52:40.374310 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-b8b69cf79-cg7gl" podUID="448163b8-6add-4db4-bfdf-071fc52606cd" Sep 30 13:52:40 crc kubenswrapper[4783]: E0930 13:52:40.433306 4783 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b" Sep 30 13:52:40 crc kubenswrapper[4783]: E0930 13:52:40.434210 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh5dfhb6h64h676hc4h78h97h669h54chfbh696hb5h54bh5d4h6bh64h644h677h584h5cbh698h9dh5bbh5f8h5b8hcdh644h5c7h694hbfh589q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t6mx5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7869668685-5bt7n_openstack(a6707a42-ffcc-431c-9631-2e98348946a5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 13:52:40 crc kubenswrapper[4783]: E0930 13:52:40.435922 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-7869668685-5bt7n" podUID="a6707a42-ffcc-431c-9631-2e98348946a5" Sep 30 13:52:40 crc kubenswrapper[4783]: E0930 13:52:40.442316 4783 log.go:32] "PullImage from image 
service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b" Sep 30 13:52:40 crc kubenswrapper[4783]: E0930 13:52:40.442519 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2fm4b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-d5f6f49c7-wt75z_openstack(0ab08fb7-6b95-4f2c-acb0-15c92172ca40): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 13:52:40 crc kubenswrapper[4783]: E0930 13:52:40.444415 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" podUID="0ab08fb7-6b95-4f2c-acb0-15c92172ca40" Sep 30 13:52:40 crc kubenswrapper[4783]: E0930 13:52:40.551584 4783 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b" Sep 30 13:52:40 crc kubenswrapper[4783]: E0930 13:52:40.551758 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j2trm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-77795d58f5-rbpq5_openstack(f4d64991-7de8-4788-afe9-d95c7b72ddd4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 13:52:40 crc kubenswrapper[4783]: E0930 13:52:40.552942 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" podUID="f4d64991-7de8-4788-afe9-d95c7b72ddd4" Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.009764 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"164c5743-32f5-4347-9c9d-20d28f1f2dce","Type":"ContainerStarted","Data":"ff01f7f8cb2c149281e4623522e0bee054923bab6c79a1fd04884e785e56859b"} Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.022800 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b901a1db-0fb0-4d58-be99-fdfd812683e6","Type":"ContainerStarted","Data":"7599420c31f154f2821fb28904486c049a6d33032a582e1929d2de130e1a7325"} Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.024879 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 13:52:41 crc kubenswrapper[4783]: E0930 13:52:41.025070 4783 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b\\\"\"" pod="openstack/dnsmasq-dns-7869668685-5bt7n" podUID="a6707a42-ffcc-431c-9631-2e98348946a5" Sep 30 13:52:41 crc kubenswrapper[4783]: E0930 13:52:41.025169 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b\\\"\"" pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" podUID="f4d64991-7de8-4788-afe9-d95c7b72ddd4" Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.040768 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.058564 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.069786 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.082414 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.253294 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-th6r6"] Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.280168 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 13:52:41 crc kubenswrapper[4783]: W0930 13:52:41.290979 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7ef9466_e9f5_467e_9b43_2b7952e5b479.slice/crio-423c2d5f5b8ee644c4e5bf433d96b66344cb5f5a915e05866fa7794d185349bf WatchSource:0}: Error finding container 423c2d5f5b8ee644c4e5bf433d96b66344cb5f5a915e05866fa7794d185349bf: Status 404 returned error can't find the container with id 423c2d5f5b8ee644c4e5bf433d96b66344cb5f5a915e05866fa7794d185349bf Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.459041 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-ttc29"] Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.466315 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8b69cf79-cg7gl" Sep 30 13:52:41 crc kubenswrapper[4783]: W0930 13:52:41.473780 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61f71f56_b66e_46a2_a0c5_25d0477db0a2.slice/crio-7fe8b7a26f633c58303ac99c85d730b53e3e8d3098e0808c7bb981ad69f17e68 WatchSource:0}: Error finding container 7fe8b7a26f633c58303ac99c85d730b53e3e8d3098e0808c7bb981ad69f17e68: Status 404 returned error can't find the container with id 7fe8b7a26f633c58303ac99c85d730b53e3e8d3098e0808c7bb981ad69f17e68 Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.550336 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/448163b8-6add-4db4-bfdf-071fc52606cd-config\") pod \"448163b8-6add-4db4-bfdf-071fc52606cd\" (UID: \"448163b8-6add-4db4-bfdf-071fc52606cd\") " Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.550419 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kqdj\" (UniqueName: \"kubernetes.io/projected/448163b8-6add-4db4-bfdf-071fc52606cd-kube-api-access-2kqdj\") pod \"448163b8-6add-4db4-bfdf-071fc52606cd\" (UID: \"448163b8-6add-4db4-bfdf-071fc52606cd\") " Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.551310 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/448163b8-6add-4db4-bfdf-071fc52606cd-config" (OuterVolumeSpecName: "config") pod "448163b8-6add-4db4-bfdf-071fc52606cd" (UID: "448163b8-6add-4db4-bfdf-071fc52606cd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.556595 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/448163b8-6add-4db4-bfdf-071fc52606cd-kube-api-access-2kqdj" (OuterVolumeSpecName: "kube-api-access-2kqdj") pod "448163b8-6add-4db4-bfdf-071fc52606cd" (UID: "448163b8-6add-4db4-bfdf-071fc52606cd"). InnerVolumeSpecName "kube-api-access-2kqdj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.560826 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.651700 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fm4b\" (UniqueName: \"kubernetes.io/projected/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-kube-api-access-2fm4b\") pod \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\" (UID: \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\") " Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.651756 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-dns-svc\") pod \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\" (UID: \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\") " Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.651793 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-config\") pod \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\" (UID: \"0ab08fb7-6b95-4f2c-acb0-15c92172ca40\") " Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.652186 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0ab08fb7-6b95-4f2c-acb0-15c92172ca40" (UID: "0ab08fb7-6b95-4f2c-acb0-15c92172ca40"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.652297 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/448163b8-6add-4db4-bfdf-071fc52606cd-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.652317 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kqdj\" (UniqueName: \"kubernetes.io/projected/448163b8-6add-4db4-bfdf-071fc52606cd-kube-api-access-2kqdj\") on node \"crc\" DevicePath \"\"" Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.652517 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-config" (OuterVolumeSpecName: "config") pod "0ab08fb7-6b95-4f2c-acb0-15c92172ca40" (UID: "0ab08fb7-6b95-4f2c-acb0-15c92172ca40"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.657183 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-kube-api-access-2fm4b" (OuterVolumeSpecName: "kube-api-access-2fm4b") pod "0ab08fb7-6b95-4f2c-acb0-15c92172ca40" (UID: "0ab08fb7-6b95-4f2c-acb0-15c92172ca40"). InnerVolumeSpecName "kube-api-access-2fm4b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.753524 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fm4b\" (UniqueName: \"kubernetes.io/projected/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-kube-api-access-2fm4b\") on node \"crc\" DevicePath \"\"" Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.753570 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 13:52:41 crc kubenswrapper[4783]: I0930 13:52:41.753584 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ab08fb7-6b95-4f2c-acb0-15c92172ca40-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.032561 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7732b547-1797-4164-ad03-6c76c2c1f207","Type":"ContainerStarted","Data":"f1ca784a1b258d08929882e0ec37237510d87717ca2a8c2b5c4059c9fefaefd9"} Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.033957 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.033997 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d5f6f49c7-wt75z" event={"ID":"0ab08fb7-6b95-4f2c-acb0-15c92172ca40","Type":"ContainerDied","Data":"8dd37e2165aa7d9c2afb7b91286343f90e07a2f26e016e690300d5ab8541e083"} Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.035117 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"fd739820-88fe-4dc4-9ff6-1dcbee461751","Type":"ContainerStarted","Data":"4088d7b5981065d01c1247da2eab6e658ebe3646d5eac21da832c69a1e4dec67"} Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.036107 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f7ef9466-e9f5-467e-9b43-2b7952e5b479","Type":"ContainerStarted","Data":"423c2d5f5b8ee644c4e5bf433d96b66344cb5f5a915e05866fa7794d185349bf"} Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.037365 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ttc29" event={"ID":"61f71f56-b66e-46a2-a0c5-25d0477db0a2","Type":"ContainerStarted","Data":"7fe8b7a26f633c58303ac99c85d730b53e3e8d3098e0808c7bb981ad69f17e68"} Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.038982 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2a09ae34-f770-404f-b7ec-1fd3b630bf4c","Type":"ContainerStarted","Data":"4edfe9588d5b6bcd8205f603bfe106257e8c8e26615234021c5ccdf0643d19cc"} Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.040305 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f1989fc2-d0ba-49ce-a488-589eaaaecb58","Type":"ContainerStarted","Data":"7ad050a85df332e98d323bde18a1d2de74abba3f9a6f236b61b32e08330992c9"} Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.041247 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8b69cf79-cg7gl" event={"ID":"448163b8-6add-4db4-bfdf-071fc52606cd","Type":"ContainerDied","Data":"856cf7f6a6b437c408002dc9846da58fbf08f05c9e72082b9c076e9d1ae5c162"} Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.041307 
4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8b69cf79-cg7gl" Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.048284 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-th6r6" event={"ID":"8bc852c2-c59b-4b84-bbfc-c8b62354c66d","Type":"ContainerStarted","Data":"72db217779a4ed932fceb8bf2af004c40cab7a5267f018e136fe53a7f4b832f6"} Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.134122 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8b69cf79-cg7gl"] Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.140383 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8b69cf79-cg7gl"] Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.152799 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d5f6f49c7-wt75z"] Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.159195 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d5f6f49c7-wt75z"] Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.194336 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.853569 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ab08fb7-6b95-4f2c-acb0-15c92172ca40" path="/var/lib/kubelet/pods/0ab08fb7-6b95-4f2c-acb0-15c92172ca40/volumes" Sep 30 13:52:42 crc kubenswrapper[4783]: I0930 13:52:42.854204 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="448163b8-6add-4db4-bfdf-071fc52606cd" path="/var/lib/kubelet/pods/448163b8-6add-4db4-bfdf-071fc52606cd/volumes" Sep 30 13:52:43 crc kubenswrapper[4783]: I0930 13:52:43.056146 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59","Type":"ContainerStarted","Data":"4309f12cac37ee43c3a3e5a9252f2ea16a630cfb0d101c58b761a48bfbad750a"} Sep 30 13:52:49 crc kubenswrapper[4783]: I0930 13:52:49.109261 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"fd739820-88fe-4dc4-9ff6-1dcbee461751","Type":"ContainerStarted","Data":"f82c79b1e83d1c85b4c136d548ea314373472d27f8b04f8100616ab361665eef"} Sep 30 13:52:49 crc kubenswrapper[4783]: I0930 13:52:49.109832 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 30 13:52:49 crc kubenswrapper[4783]: I0930 13:52:49.111445 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f7ef9466-e9f5-467e-9b43-2b7952e5b479","Type":"ContainerStarted","Data":"723b0965694bde9a854686ff0198c23d4d50f4ee3872b0d79cd2c31ef8e522d8"} Sep 30 13:52:49 crc kubenswrapper[4783]: I0930 13:52:49.112837 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-th6r6" event={"ID":"8bc852c2-c59b-4b84-bbfc-c8b62354c66d","Type":"ContainerStarted","Data":"78d4d2f406a94d848bdcacabbf7c6fb21ceabcfec8d6cbb5135e09f32718484b"} Sep 30 13:52:49 crc kubenswrapper[4783]: I0930 13:52:49.112961 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-th6r6" Sep 30 13:52:49 crc kubenswrapper[4783]: I0930 13:52:49.114250 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ttc29" 
event={"ID":"61f71f56-b66e-46a2-a0c5-25d0477db0a2","Type":"ContainerStarted","Data":"bfe780365ea46fb5e1592711f467ca713d3ca1a21a2df2e0b8564b2d0d43f7bc"} Sep 30 13:52:49 crc kubenswrapper[4783]: I0930 13:52:49.115373 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2a09ae34-f770-404f-b7ec-1fd3b630bf4c","Type":"ContainerStarted","Data":"759146106a016f0ffdf2a8c450a2694a39e00cea474b6537ea3f0686d0b0320a"} Sep 30 13:52:49 crc kubenswrapper[4783]: I0930 13:52:49.117162 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59","Type":"ContainerStarted","Data":"fcc76b379c0fd4e6d39b37b60874fb17163db1dfdebc6baf9355f72a722f6cb8"} Sep 30 13:52:49 crc kubenswrapper[4783]: I0930 13:52:49.118465 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7732b547-1797-4164-ad03-6c76c2c1f207","Type":"ContainerStarted","Data":"12ae70be391cbc8dcb6e2645d25ed4334ed8a24aa2ed55ba65a7d836ad84b67e"} Sep 30 13:52:49 crc kubenswrapper[4783]: I0930 13:52:49.118537 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 13:52:49 crc kubenswrapper[4783]: I0930 13:52:49.119797 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f1989fc2-d0ba-49ce-a488-589eaaaecb58","Type":"ContainerStarted","Data":"847d9d5dbc70a3f2b0e5bd8fb8cbe4e12e26e439bcef251ecfb39677c1e73e8d"} Sep 30 13:52:49 crc kubenswrapper[4783]: I0930 13:52:49.136410 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=14.555508167 podStartE2EDuration="21.13639411s" podCreationTimestamp="2025-09-30 13:52:28 +0000 UTC" firstStartedPulling="2025-09-30 13:52:41.040541334 +0000 UTC m=+1060.972007641" lastFinishedPulling="2025-09-30 13:52:47.621427277 +0000 UTC m=+1067.552893584" observedRunningTime="2025-09-30 13:52:49.132691991 +0000 UTC m=+1069.064158298" watchObservedRunningTime="2025-09-30 13:52:49.13639411 +0000 UTC m=+1069.067860417" Sep 30 13:52:49 crc kubenswrapper[4783]: I0930 13:52:49.190528 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=11.641378532 podStartE2EDuration="19.190512224s" podCreationTimestamp="2025-09-30 13:52:30 +0000 UTC" firstStartedPulling="2025-09-30 13:52:41.042463115 +0000 UTC m=+1060.973929422" lastFinishedPulling="2025-09-30 13:52:48.591596807 +0000 UTC m=+1068.523063114" observedRunningTime="2025-09-30 13:52:49.182196257 +0000 UTC m=+1069.113662564" watchObservedRunningTime="2025-09-30 13:52:49.190512224 +0000 UTC m=+1069.121978531" Sep 30 13:52:49 crc kubenswrapper[4783]: I0930 13:52:49.218188 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-th6r6" podStartSLOduration=8.01214599 podStartE2EDuration="15.21817229s" podCreationTimestamp="2025-09-30 13:52:34 +0000 UTC" firstStartedPulling="2025-09-30 13:52:41.27923993 +0000 UTC m=+1061.210706237" lastFinishedPulling="2025-09-30 13:52:48.48526623 +0000 UTC m=+1068.416732537" observedRunningTime="2025-09-30 13:52:49.217027563 +0000 UTC m=+1069.148493880" watchObservedRunningTime="2025-09-30 13:52:49.21817229 +0000 UTC m=+1069.149638597" Sep 30 13:52:50 crc kubenswrapper[4783]: I0930 13:52:50.133504 4783 generic.go:334] "Generic (PLEG): container finished" 
podID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerID="bfe780365ea46fb5e1592711f467ca713d3ca1a21a2df2e0b8564b2d0d43f7bc" exitCode=0 Sep 30 13:52:50 crc kubenswrapper[4783]: I0930 13:52:50.133877 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ttc29" event={"ID":"61f71f56-b66e-46a2-a0c5-25d0477db0a2","Type":"ContainerDied","Data":"bfe780365ea46fb5e1592711f467ca713d3ca1a21a2df2e0b8564b2d0d43f7bc"} Sep 30 13:52:51 crc kubenswrapper[4783]: I0930 13:52:51.143288 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ttc29" event={"ID":"61f71f56-b66e-46a2-a0c5-25d0477db0a2","Type":"ContainerStarted","Data":"ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204"} Sep 30 13:52:52 crc kubenswrapper[4783]: I0930 13:52:52.198197 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=9.596783363 podStartE2EDuration="20.198178757s" podCreationTimestamp="2025-09-30 13:52:32 +0000 UTC" firstStartedPulling="2025-09-30 13:52:41.29202457 +0000 UTC m=+1061.223490877" lastFinishedPulling="2025-09-30 13:52:51.893419924 +0000 UTC m=+1071.824886271" observedRunningTime="2025-09-30 13:52:52.17453727 +0000 UTC m=+1072.106003577" watchObservedRunningTime="2025-09-30 13:52:52.198178757 +0000 UTC m=+1072.129645064" Sep 30 13:52:52 crc kubenswrapper[4783]: I0930 13:52:52.321029 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:52 crc kubenswrapper[4783]: I0930 13:52:52.367811 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.174617 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ttc29" event={"ID":"61f71f56-b66e-46a2-a0c5-25d0477db0a2","Type":"ContainerStarted","Data":"dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849"} Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.175081 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.181214 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59","Type":"ContainerStarted","Data":"f963b067d2255b5af1022d6dd485fe3e8d7ab5715354c349f168effd5f9bdcd4"} Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.184546 4783 generic.go:334] "Generic (PLEG): container finished" podID="f1989fc2-d0ba-49ce-a488-589eaaaecb58" containerID="847d9d5dbc70a3f2b0e5bd8fb8cbe4e12e26e439bcef251ecfb39677c1e73e8d" exitCode=0 Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.184633 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f1989fc2-d0ba-49ce-a488-589eaaaecb58","Type":"ContainerDied","Data":"847d9d5dbc70a3f2b0e5bd8fb8cbe4e12e26e439bcef251ecfb39677c1e73e8d"} Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.188537 4783 generic.go:334] "Generic (PLEG): container finished" podID="a6707a42-ffcc-431c-9631-2e98348946a5" containerID="b866065c4179599acb6f2c04f4700353e7ba467f54beef5a2713976f62702a30" exitCode=0 Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.188626 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7869668685-5bt7n" 
event={"ID":"a6707a42-ffcc-431c-9631-2e98348946a5","Type":"ContainerDied","Data":"b866065c4179599acb6f2c04f4700353e7ba467f54beef5a2713976f62702a30"} Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.202267 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f7ef9466-e9f5-467e-9b43-2b7952e5b479","Type":"ContainerStarted","Data":"77f234927c319795b9d92d2d040555fe9d069b79314bf58ace80d9e625297b71"} Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.203483 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.208776 4783 generic.go:334] "Generic (PLEG): container finished" podID="2a09ae34-f770-404f-b7ec-1fd3b630bf4c" containerID="759146106a016f0ffdf2a8c450a2694a39e00cea474b6537ea3f0686d0b0320a" exitCode=0 Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.208839 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2a09ae34-f770-404f-b7ec-1fd3b630bf4c","Type":"ContainerDied","Data":"759146106a016f0ffdf2a8c450a2694a39e00cea474b6537ea3f0686d0b0320a"} Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.224850 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-ttc29" podStartSLOduration=12.651262146 podStartE2EDuration="19.224825176s" podCreationTimestamp="2025-09-30 13:52:34 +0000 UTC" firstStartedPulling="2025-09-30 13:52:41.478037428 +0000 UTC m=+1061.409503735" lastFinishedPulling="2025-09-30 13:52:48.051600458 +0000 UTC m=+1067.983066765" observedRunningTime="2025-09-30 13:52:53.222858743 +0000 UTC m=+1073.154325080" watchObservedRunningTime="2025-09-30 13:52:53.224825176 +0000 UTC m=+1073.156291513" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.256568 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=6.719844437 podStartE2EDuration="16.256537482s" podCreationTimestamp="2025-09-30 13:52:37 +0000 UTC" firstStartedPulling="2025-09-30 13:52:42.37081376 +0000 UTC m=+1062.302280067" lastFinishedPulling="2025-09-30 13:52:51.907506765 +0000 UTC m=+1071.838973112" observedRunningTime="2025-09-30 13:52:53.24836177 +0000 UTC m=+1073.179828087" watchObservedRunningTime="2025-09-30 13:52:53.256537482 +0000 UTC m=+1073.188003809" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.272403 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.451853 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.451903 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.520222 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.600689 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7869668685-5bt7n"] Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.627674 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86b869995c-xpf2q"] Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.629072 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.630815 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.656918 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86b869995c-xpf2q"] Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.674744 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-9w2wl"] Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.675771 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.677537 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.690212 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-9w2wl"] Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.766903 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnn4r\" (UniqueName: \"kubernetes.io/projected/016a6efa-f0f3-404c-8423-d58eda1d7046-kube-api-access-nnn4r\") pod \"dnsmasq-dns-86b869995c-xpf2q\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.766951 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/801ddf87-455e-4941-8637-4c2f5da49d41-ovs-rundir\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.767038 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zb7mh\" (UniqueName: \"kubernetes.io/projected/801ddf87-455e-4941-8637-4c2f5da49d41-kube-api-access-zb7mh\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.767073 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801ddf87-455e-4941-8637-4c2f5da49d41-combined-ca-bundle\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.767124 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-config\") pod \"dnsmasq-dns-86b869995c-xpf2q\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.767177 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/801ddf87-455e-4941-8637-4c2f5da49d41-ovn-rundir\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc 
kubenswrapper[4783]: I0930 13:52:53.767204 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/801ddf87-455e-4941-8637-4c2f5da49d41-config\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.767235 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-dns-svc\") pod \"dnsmasq-dns-86b869995c-xpf2q\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.767256 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-ovsdbserver-nb\") pod \"dnsmasq-dns-86b869995c-xpf2q\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.767278 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/801ddf87-455e-4941-8637-4c2f5da49d41-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.857535 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.868376 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnn4r\" (UniqueName: \"kubernetes.io/projected/016a6efa-f0f3-404c-8423-d58eda1d7046-kube-api-access-nnn4r\") pod \"dnsmasq-dns-86b869995c-xpf2q\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.868428 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/801ddf87-455e-4941-8637-4c2f5da49d41-ovs-rundir\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.868469 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zb7mh\" (UniqueName: \"kubernetes.io/projected/801ddf87-455e-4941-8637-4c2f5da49d41-kube-api-access-zb7mh\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.868497 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801ddf87-455e-4941-8637-4c2f5da49d41-combined-ca-bundle\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.868518 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-config\") pod \"dnsmasq-dns-86b869995c-xpf2q\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.868546 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/801ddf87-455e-4941-8637-4c2f5da49d41-ovn-rundir\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.868562 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/801ddf87-455e-4941-8637-4c2f5da49d41-config\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.868579 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-dns-svc\") pod \"dnsmasq-dns-86b869995c-xpf2q\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.868599 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-ovsdbserver-nb\") pod \"dnsmasq-dns-86b869995c-xpf2q\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.868618 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/801ddf87-455e-4941-8637-4c2f5da49d41-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.868937 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/801ddf87-455e-4941-8637-4c2f5da49d41-ovn-rundir\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.869748 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-config\") pod \"dnsmasq-dns-86b869995c-xpf2q\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.868938 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/801ddf87-455e-4941-8637-4c2f5da49d41-ovs-rundir\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.870543 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/801ddf87-455e-4941-8637-4c2f5da49d41-config\") pod \"ovn-controller-metrics-9w2wl\" (UID: 
\"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.870554 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-dns-svc\") pod \"dnsmasq-dns-86b869995c-xpf2q\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.870678 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-ovsdbserver-nb\") pod \"dnsmasq-dns-86b869995c-xpf2q\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.880997 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/801ddf87-455e-4941-8637-4c2f5da49d41-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.881032 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801ddf87-455e-4941-8637-4c2f5da49d41-combined-ca-bundle\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.893947 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zb7mh\" (UniqueName: \"kubernetes.io/projected/801ddf87-455e-4941-8637-4c2f5da49d41-kube-api-access-zb7mh\") pod \"ovn-controller-metrics-9w2wl\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.901982 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnn4r\" (UniqueName: \"kubernetes.io/projected/016a6efa-f0f3-404c-8423-d58eda1d7046-kube-api-access-nnn4r\") pod \"dnsmasq-dns-86b869995c-xpf2q\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.945815 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:53 crc kubenswrapper[4783]: I0930 13:52:53.994782 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.128589 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77795d58f5-rbpq5"] Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.192800 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d86d68bf7-lxqvg"] Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.194333 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.196461 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.234685 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d86d68bf7-lxqvg"] Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.289432 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-config\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.289502 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-ovsdbserver-sb\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.289527 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrzgg\" (UniqueName: \"kubernetes.io/projected/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-kube-api-access-hrzgg\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.289610 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-dns-svc\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.289738 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-ovsdbserver-nb\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.301490 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2a09ae34-f770-404f-b7ec-1fd3b630bf4c","Type":"ContainerStarted","Data":"5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982"} Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.311754 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f1989fc2-d0ba-49ce-a488-589eaaaecb58","Type":"ContainerStarted","Data":"8b44a3225e80529e92402310c15ae3201738ad99fd4f25914d73c93731e3eb14"} Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.318758 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7869668685-5bt7n" podUID="a6707a42-ffcc-431c-9631-2e98348946a5" containerName="dnsmasq-dns" containerID="cri-o://c041e0053b5144df5622086034b249bc5baba4473de570e3babbe4b382eaa1d5" gracePeriod=10 Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.318964 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-7869668685-5bt7n" event={"ID":"a6707a42-ffcc-431c-9631-2e98348946a5","Type":"ContainerStarted","Data":"c041e0053b5144df5622086034b249bc5baba4473de570e3babbe4b382eaa1d5"} Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.319970 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7869668685-5bt7n" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.320005 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.344783 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=20.911504574 podStartE2EDuration="28.344764874s" podCreationTimestamp="2025-09-30 13:52:26 +0000 UTC" firstStartedPulling="2025-09-30 13:52:41.050461871 +0000 UTC m=+1060.981928178" lastFinishedPulling="2025-09-30 13:52:48.483722171 +0000 UTC m=+1068.415188478" observedRunningTime="2025-09-30 13:52:54.343205915 +0000 UTC m=+1074.274672222" watchObservedRunningTime="2025-09-30 13:52:54.344764874 +0000 UTC m=+1074.276231181" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.384705 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.384174 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=20.549396259 podStartE2EDuration="27.384156246s" podCreationTimestamp="2025-09-30 13:52:27 +0000 UTC" firstStartedPulling="2025-09-30 13:52:41.050030597 +0000 UTC m=+1060.981496904" lastFinishedPulling="2025-09-30 13:52:47.884790584 +0000 UTC m=+1067.816256891" observedRunningTime="2025-09-30 13:52:54.373554787 +0000 UTC m=+1074.305021114" watchObservedRunningTime="2025-09-30 13:52:54.384156246 +0000 UTC m=+1074.315622553" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.396269 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-ovsdbserver-nb\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.396405 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-config\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.396433 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-ovsdbserver-sb\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.396455 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrzgg\" (UniqueName: \"kubernetes.io/projected/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-kube-api-access-hrzgg\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: 
I0930 13:52:54.396508 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-dns-svc\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.397591 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-ovsdbserver-nb\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.398338 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-ovsdbserver-sb\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.398797 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7869668685-5bt7n" podStartSLOduration=3.226249216 podStartE2EDuration="31.398787035s" podCreationTimestamp="2025-09-30 13:52:23 +0000 UTC" firstStartedPulling="2025-09-30 13:52:24.207471783 +0000 UTC m=+1044.138938090" lastFinishedPulling="2025-09-30 13:52:52.380009592 +0000 UTC m=+1072.311475909" observedRunningTime="2025-09-30 13:52:54.3958198 +0000 UTC m=+1074.327286107" watchObservedRunningTime="2025-09-30 13:52:54.398787035 +0000 UTC m=+1074.330253342" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.401483 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-config\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.401678 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-dns-svc\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.427509 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrzgg\" (UniqueName: \"kubernetes.io/projected/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-kube-api-access-hrzgg\") pod \"dnsmasq-dns-5d86d68bf7-lxqvg\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.563675 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.564900 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.568406 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-v2g9v" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.568409 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.568699 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.568835 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.582368 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.601197 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.604457 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-9w2wl"] Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.617643 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.702071 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2trm\" (UniqueName: \"kubernetes.io/projected/f4d64991-7de8-4788-afe9-d95c7b72ddd4-kube-api-access-j2trm\") pod \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\" (UID: \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\") " Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.702130 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4d64991-7de8-4788-afe9-d95c7b72ddd4-config\") pod \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\" (UID: \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\") " Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.702264 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4d64991-7de8-4788-afe9-d95c7b72ddd4-dns-svc\") pod \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\" (UID: \"f4d64991-7de8-4788-afe9-d95c7b72ddd4\") " Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.702486 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.702513 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.702540 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: 
\"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.702555 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3e0048e0-a916-434d-abd4-571cec7d4b6a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.702580 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmgcd\" (UniqueName: \"kubernetes.io/projected/3e0048e0-a916-434d-abd4-571cec7d4b6a-kube-api-access-nmgcd\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.702619 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e0048e0-a916-434d-abd4-571cec7d4b6a-config\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.702653 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e0048e0-a916-434d-abd4-571cec7d4b6a-scripts\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.703602 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4d64991-7de8-4788-afe9-d95c7b72ddd4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f4d64991-7de8-4788-afe9-d95c7b72ddd4" (UID: "f4d64991-7de8-4788-afe9-d95c7b72ddd4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.703649 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4d64991-7de8-4788-afe9-d95c7b72ddd4-config" (OuterVolumeSpecName: "config") pod "f4d64991-7de8-4788-afe9-d95c7b72ddd4" (UID: "f4d64991-7de8-4788-afe9-d95c7b72ddd4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.706877 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4d64991-7de8-4788-afe9-d95c7b72ddd4-kube-api-access-j2trm" (OuterVolumeSpecName: "kube-api-access-j2trm") pod "f4d64991-7de8-4788-afe9-d95c7b72ddd4" (UID: "f4d64991-7de8-4788-afe9-d95c7b72ddd4"). InnerVolumeSpecName "kube-api-access-j2trm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.804360 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e0048e0-a916-434d-abd4-571cec7d4b6a-config\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.804648 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e0048e0-a916-434d-abd4-571cec7d4b6a-scripts\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.804702 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.804729 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.804755 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.804772 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3e0048e0-a916-434d-abd4-571cec7d4b6a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.804795 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmgcd\" (UniqueName: \"kubernetes.io/projected/3e0048e0-a916-434d-abd4-571cec7d4b6a-kube-api-access-nmgcd\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.804833 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4d64991-7de8-4788-afe9-d95c7b72ddd4-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.804847 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2trm\" (UniqueName: \"kubernetes.io/projected/f4d64991-7de8-4788-afe9-d95c7b72ddd4-kube-api-access-j2trm\") on node \"crc\" DevicePath \"\"" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.804859 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4d64991-7de8-4788-afe9-d95c7b72ddd4-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.805548 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/3e0048e0-a916-434d-abd4-571cec7d4b6a-config\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.805845 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3e0048e0-a916-434d-abd4-571cec7d4b6a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.806371 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e0048e0-a916-434d-abd4-571cec7d4b6a-scripts\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.808004 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86b869995c-xpf2q"] Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.809244 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.810847 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.812953 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.815139 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7869668685-5bt7n" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.821837 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmgcd\" (UniqueName: \"kubernetes.io/projected/3e0048e0-a916-434d-abd4-571cec7d4b6a-kube-api-access-nmgcd\") pod \"ovn-northd-0\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: W0930 13:52:54.822911 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod016a6efa_f0f3_404c_8423_d58eda1d7046.slice/crio-9c773d1ed7a15a9b1d2089add2516a660896cc57b4ad1c2e04a5589ef73ed379 WatchSource:0}: Error finding container 9c773d1ed7a15a9b1d2089add2516a660896cc57b4ad1c2e04a5589ef73ed379: Status 404 returned error can't find the container with id 9c773d1ed7a15a9b1d2089add2516a660896cc57b4ad1c2e04a5589ef73ed379 Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.905559 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6mx5\" (UniqueName: \"kubernetes.io/projected/a6707a42-ffcc-431c-9631-2e98348946a5-kube-api-access-t6mx5\") pod \"a6707a42-ffcc-431c-9631-2e98348946a5\" (UID: \"a6707a42-ffcc-431c-9631-2e98348946a5\") " Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.905699 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6707a42-ffcc-431c-9631-2e98348946a5-dns-svc\") pod \"a6707a42-ffcc-431c-9631-2e98348946a5\" (UID: \"a6707a42-ffcc-431c-9631-2e98348946a5\") " Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.905788 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6707a42-ffcc-431c-9631-2e98348946a5-config\") pod \"a6707a42-ffcc-431c-9631-2e98348946a5\" (UID: \"a6707a42-ffcc-431c-9631-2e98348946a5\") " Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.907860 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.914350 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6707a42-ffcc-431c-9631-2e98348946a5-kube-api-access-t6mx5" (OuterVolumeSpecName: "kube-api-access-t6mx5") pod "a6707a42-ffcc-431c-9631-2e98348946a5" (UID: "a6707a42-ffcc-431c-9631-2e98348946a5"). InnerVolumeSpecName "kube-api-access-t6mx5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.960989 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6707a42-ffcc-431c-9631-2e98348946a5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a6707a42-ffcc-431c-9631-2e98348946a5" (UID: "a6707a42-ffcc-431c-9631-2e98348946a5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:52:54 crc kubenswrapper[4783]: I0930 13:52:54.965204 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6707a42-ffcc-431c-9631-2e98348946a5-config" (OuterVolumeSpecName: "config") pod "a6707a42-ffcc-431c-9631-2e98348946a5" (UID: "a6707a42-ffcc-431c-9631-2e98348946a5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.011331 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6707a42-ffcc-431c-9631-2e98348946a5-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.011550 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6707a42-ffcc-431c-9631-2e98348946a5-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.011562 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6mx5\" (UniqueName: \"kubernetes.io/projected/a6707a42-ffcc-431c-9631-2e98348946a5-kube-api-access-t6mx5\") on node \"crc\" DevicePath \"\"" Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.056551 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d86d68bf7-lxqvg"] Sep 30 13:52:55 crc kubenswrapper[4783]: E0930 13:52:55.136997 4783 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4d64991_7de8_4788_afe9_d95c7b72ddd4.slice\": RecentStats: unable to find data in memory cache]" Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.328783 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" event={"ID":"f4d64991-7de8-4788-afe9-d95c7b72ddd4","Type":"ContainerDied","Data":"343da532930a0b28b92ff2e4e66aa3d145e373cc0c7c615221661ff469f040c0"} Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.329198 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77795d58f5-rbpq5" Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.333517 4783 generic.go:334] "Generic (PLEG): container finished" podID="016a6efa-f0f3-404c-8423-d58eda1d7046" containerID="3db826bdcbcc08aa252a9ee56f45d3e9e0a1daec46d3e0b244b460aaf1dc4dff" exitCode=0 Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.333590 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86b869995c-xpf2q" event={"ID":"016a6efa-f0f3-404c-8423-d58eda1d7046","Type":"ContainerDied","Data":"3db826bdcbcc08aa252a9ee56f45d3e9e0a1daec46d3e0b244b460aaf1dc4dff"} Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.333618 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86b869995c-xpf2q" event={"ID":"016a6efa-f0f3-404c-8423-d58eda1d7046","Type":"ContainerStarted","Data":"9c773d1ed7a15a9b1d2089add2516a660896cc57b4ad1c2e04a5589ef73ed379"} Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.336600 4783 generic.go:334] "Generic (PLEG): container finished" podID="3dc2c97a-e1b4-46e9-87bc-3b73f22f1162" containerID="0c9b0fe61bb0bd2d3665af492c41a442c14fa62849558fc26ce9b1d675075ebf" exitCode=0 Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.336725 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" event={"ID":"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162","Type":"ContainerDied","Data":"0c9b0fe61bb0bd2d3665af492c41a442c14fa62849558fc26ce9b1d675075ebf"} Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.336762 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" event={"ID":"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162","Type":"ContainerStarted","Data":"e4655cf2c727a47095be9b9c4cebd1b218d88d8e987b5949f07ff4cdd691587b"} Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.342124 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-9w2wl" event={"ID":"801ddf87-455e-4941-8637-4c2f5da49d41","Type":"ContainerStarted","Data":"25fabd726430aea0cdf79b31be6e26feb4aede29cf3f33ef9ba4f73371bf719a"} Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.342184 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-9w2wl" event={"ID":"801ddf87-455e-4941-8637-4c2f5da49d41","Type":"ContainerStarted","Data":"b20a30aecc57bec13d1844a1ed45f18452c8fbb06020939958b9c83740870e52"} Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.347841 4783 generic.go:334] "Generic (PLEG): container finished" podID="a6707a42-ffcc-431c-9631-2e98348946a5" containerID="c041e0053b5144df5622086034b249bc5baba4473de570e3babbe4b382eaa1d5" exitCode=0 Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.347902 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7869668685-5bt7n" event={"ID":"a6707a42-ffcc-431c-9631-2e98348946a5","Type":"ContainerDied","Data":"c041e0053b5144df5622086034b249bc5baba4473de570e3babbe4b382eaa1d5"} Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.347959 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7869668685-5bt7n" event={"ID":"a6707a42-ffcc-431c-9631-2e98348946a5","Type":"ContainerDied","Data":"cae58a3f2050f758d59edfa2cc93e4ef15e36b06d32b994b638ce30db8e432a1"} Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.347986 4783 scope.go:117] "RemoveContainer" containerID="c041e0053b5144df5622086034b249bc5baba4473de570e3babbe4b382eaa1d5" Sep 30 13:52:55 crc 
kubenswrapper[4783]: I0930 13:52:55.348661 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7869668685-5bt7n" Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.372288 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.379343 4783 scope.go:117] "RemoveContainer" containerID="b866065c4179599acb6f2c04f4700353e7ba467f54beef5a2713976f62702a30" Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.380190 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-9w2wl" podStartSLOduration=2.380170405 podStartE2EDuration="2.380170405s" podCreationTimestamp="2025-09-30 13:52:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:52:55.376639402 +0000 UTC m=+1075.308105739" watchObservedRunningTime="2025-09-30 13:52:55.380170405 +0000 UTC m=+1075.311636732" Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.433248 4783 scope.go:117] "RemoveContainer" containerID="c041e0053b5144df5622086034b249bc5baba4473de570e3babbe4b382eaa1d5" Sep 30 13:52:55 crc kubenswrapper[4783]: E0930 13:52:55.433626 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c041e0053b5144df5622086034b249bc5baba4473de570e3babbe4b382eaa1d5\": container with ID starting with c041e0053b5144df5622086034b249bc5baba4473de570e3babbe4b382eaa1d5 not found: ID does not exist" containerID="c041e0053b5144df5622086034b249bc5baba4473de570e3babbe4b382eaa1d5" Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.433653 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c041e0053b5144df5622086034b249bc5baba4473de570e3babbe4b382eaa1d5"} err="failed to get container status \"c041e0053b5144df5622086034b249bc5baba4473de570e3babbe4b382eaa1d5\": rpc error: code = NotFound desc = could not find container \"c041e0053b5144df5622086034b249bc5baba4473de570e3babbe4b382eaa1d5\": container with ID starting with c041e0053b5144df5622086034b249bc5baba4473de570e3babbe4b382eaa1d5 not found: ID does not exist" Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.433673 4783 scope.go:117] "RemoveContainer" containerID="b866065c4179599acb6f2c04f4700353e7ba467f54beef5a2713976f62702a30" Sep 30 13:52:55 crc kubenswrapper[4783]: E0930 13:52:55.433841 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b866065c4179599acb6f2c04f4700353e7ba467f54beef5a2713976f62702a30\": container with ID starting with b866065c4179599acb6f2c04f4700353e7ba467f54beef5a2713976f62702a30 not found: ID does not exist" containerID="b866065c4179599acb6f2c04f4700353e7ba467f54beef5a2713976f62702a30" Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.433864 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b866065c4179599acb6f2c04f4700353e7ba467f54beef5a2713976f62702a30"} err="failed to get container status \"b866065c4179599acb6f2c04f4700353e7ba467f54beef5a2713976f62702a30\": rpc error: code = NotFound desc = could not find container \"b866065c4179599acb6f2c04f4700353e7ba467f54beef5a2713976f62702a30\": container with ID starting with b866065c4179599acb6f2c04f4700353e7ba467f54beef5a2713976f62702a30 not found: ID does not exist" Sep 30 
13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.456339 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77795d58f5-rbpq5"] Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.461396 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77795d58f5-rbpq5"] Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.473803 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7869668685-5bt7n"] Sep 30 13:52:55 crc kubenswrapper[4783]: I0930 13:52:55.478746 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7869668685-5bt7n"] Sep 30 13:52:56 crc kubenswrapper[4783]: I0930 13:52:56.360112 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86b869995c-xpf2q" event={"ID":"016a6efa-f0f3-404c-8423-d58eda1d7046","Type":"ContainerStarted","Data":"5679bf3e3cc760ea4cad94616d292743348f61aed73c046f0a1f4b6ef8a85068"} Sep 30 13:52:56 crc kubenswrapper[4783]: I0930 13:52:56.360604 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:52:56 crc kubenswrapper[4783]: I0930 13:52:56.364383 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"3e0048e0-a916-434d-abd4-571cec7d4b6a","Type":"ContainerStarted","Data":"d59fffe18a7618daae0e434eeba0899a531be1e6a77a2a4f939e1058850e7f60"} Sep 30 13:52:56 crc kubenswrapper[4783]: I0930 13:52:56.368930 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" event={"ID":"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162","Type":"ContainerStarted","Data":"054a40ae62c6180f73908b28d7e23279564aa631e8bcdb29a4aa0661d1eb7515"} Sep 30 13:52:56 crc kubenswrapper[4783]: I0930 13:52:56.369535 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:52:56 crc kubenswrapper[4783]: I0930 13:52:56.388757 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86b869995c-xpf2q" podStartSLOduration=3.388725744 podStartE2EDuration="3.388725744s" podCreationTimestamp="2025-09-30 13:52:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:52:56.379334624 +0000 UTC m=+1076.310800941" watchObservedRunningTime="2025-09-30 13:52:56.388725744 +0000 UTC m=+1076.320192091" Sep 30 13:52:56 crc kubenswrapper[4783]: I0930 13:52:56.404963 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" podStartSLOduration=2.404938564 podStartE2EDuration="2.404938564s" podCreationTimestamp="2025-09-30 13:52:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:52:56.398053473 +0000 UTC m=+1076.329519860" watchObservedRunningTime="2025-09-30 13:52:56.404938564 +0000 UTC m=+1076.336404881" Sep 30 13:52:56 crc kubenswrapper[4783]: I0930 13:52:56.852464 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6707a42-ffcc-431c-9631-2e98348946a5" path="/var/lib/kubelet/pods/a6707a42-ffcc-431c-9631-2e98348946a5/volumes" Sep 30 13:52:56 crc kubenswrapper[4783]: I0930 13:52:56.853538 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4d64991-7de8-4788-afe9-d95c7b72ddd4" 
path="/var/lib/kubelet/pods/f4d64991-7de8-4788-afe9-d95c7b72ddd4/volumes" Sep 30 13:52:57 crc kubenswrapper[4783]: I0930 13:52:57.382037 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"3e0048e0-a916-434d-abd4-571cec7d4b6a","Type":"ContainerStarted","Data":"ad940715dd3642761ec895a6d3116d512bb6c980dd421be308131af63b9114b0"} Sep 30 13:52:57 crc kubenswrapper[4783]: I0930 13:52:57.382175 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"3e0048e0-a916-434d-abd4-571cec7d4b6a","Type":"ContainerStarted","Data":"35f523ca250ac79ac4541561752eaf87ea314b3758bd92036bb084eef35aa318"} Sep 30 13:52:57 crc kubenswrapper[4783]: I0930 13:52:57.423644 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.106382819 podStartE2EDuration="3.423615868s" podCreationTimestamp="2025-09-30 13:52:54 +0000 UTC" firstStartedPulling="2025-09-30 13:52:55.388935775 +0000 UTC m=+1075.320402092" lastFinishedPulling="2025-09-30 13:52:56.706168834 +0000 UTC m=+1076.637635141" observedRunningTime="2025-09-30 13:52:57.41371855 +0000 UTC m=+1077.345184927" watchObservedRunningTime="2025-09-30 13:52:57.423615868 +0000 UTC m=+1077.355082225" Sep 30 13:52:57 crc kubenswrapper[4783]: I0930 13:52:57.483460 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 30 13:52:57 crc kubenswrapper[4783]: I0930 13:52:57.483575 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 30 13:52:58 crc kubenswrapper[4783]: I0930 13:52:58.363101 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:58 crc kubenswrapper[4783]: I0930 13:52:58.363466 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 30 13:52:58 crc kubenswrapper[4783]: I0930 13:52:58.391335 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.450918 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.521035 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86b869995c-xpf2q"] Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.521252 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86b869995c-xpf2q" podUID="016a6efa-f0f3-404c-8423-d58eda1d7046" containerName="dnsmasq-dns" containerID="cri-o://5679bf3e3cc760ea4cad94616d292743348f61aed73c046f0a1f4b6ef8a85068" gracePeriod=10 Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.527522 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.550267 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p"] Sep 30 13:53:00 crc kubenswrapper[4783]: E0930 13:53:00.550558 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6707a42-ffcc-431c-9631-2e98348946a5" containerName="dnsmasq-dns" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.550573 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6707a42-ffcc-431c-9631-2e98348946a5" 
containerName="dnsmasq-dns" Sep 30 13:53:00 crc kubenswrapper[4783]: E0930 13:53:00.550585 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6707a42-ffcc-431c-9631-2e98348946a5" containerName="init" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.550592 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6707a42-ffcc-431c-9631-2e98348946a5" containerName="init" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.550756 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6707a42-ffcc-431c-9631-2e98348946a5" containerName="dnsmasq-dns" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.551582 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.574853 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p"] Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.618595 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.618679 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf7f9\" (UniqueName: \"kubernetes.io/projected/6b8e5aea-dccd-4876-8d92-3dab97f8db58-kube-api-access-hf7f9\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.618698 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.618726 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-config\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.618759 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-dns-svc\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.720508 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.720591 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf7f9\" (UniqueName: 
\"kubernetes.io/projected/6b8e5aea-dccd-4876-8d92-3dab97f8db58-kube-api-access-hf7f9\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.720612 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.720640 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-config\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.720673 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-dns-svc\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.732958 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.733085 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-dns-svc\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.733097 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-config\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.733421 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.741125 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf7f9\" (UniqueName: \"kubernetes.io/projected/6b8e5aea-dccd-4876-8d92-3dab97f8db58-kube-api-access-hf7f9\") pod \"dnsmasq-dns-6c6d5d5bd7-vrp8p\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") " pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:00 crc kubenswrapper[4783]: I0930 13:53:00.873986 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.386796 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p"] Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.445571 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" event={"ID":"6b8e5aea-dccd-4876-8d92-3dab97f8db58","Type":"ContainerStarted","Data":"c259dff0ca0f218b3cc59bca95b91e577164a746ddde2a06446d9c095a8af1f5"} Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.454534 4783 generic.go:334] "Generic (PLEG): container finished" podID="016a6efa-f0f3-404c-8423-d58eda1d7046" containerID="5679bf3e3cc760ea4cad94616d292743348f61aed73c046f0a1f4b6ef8a85068" exitCode=0 Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.454578 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86b869995c-xpf2q" event={"ID":"016a6efa-f0f3-404c-8423-d58eda1d7046","Type":"ContainerDied","Data":"5679bf3e3cc760ea4cad94616d292743348f61aed73c046f0a1f4b6ef8a85068"} Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.683312 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.703540 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.703692 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.705506 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.705533 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.705908 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-584z2" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.705949 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.745257 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-lock\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.745393 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq6fk\" (UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-kube-api-access-rq6fk\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.745441 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-cache\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.745506 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" 
(UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.745564 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.847814 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.847901 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.848124 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-lock\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.848263 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq6fk\" (UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-kube-api-access-rq6fk\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.848335 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-cache\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.848597 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.849095 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-lock\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.849172 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-cache\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: E0930 13:53:01.849566 4783 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 13:53:01 
crc kubenswrapper[4783]: E0930 13:53:01.849639 4783 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 13:53:01 crc kubenswrapper[4783]: E0930 13:53:01.849761 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift podName:3da50d95-fee8-4e78-ad46-c2d8ac95adc2 nodeName:}" failed. No retries permitted until 2025-09-30 13:53:02.349722072 +0000 UTC m=+1082.281188439 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift") pod "swift-storage-0" (UID: "3da50d95-fee8-4e78-ad46-c2d8ac95adc2") : configmap "swift-ring-files" not found Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.880636 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq6fk\" (UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-kube-api-access-rq6fk\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:01 crc kubenswrapper[4783]: I0930 13:53:01.888798 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:02 crc kubenswrapper[4783]: I0930 13:53:02.356786 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:02 crc kubenswrapper[4783]: E0930 13:53:02.356972 4783 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 13:53:02 crc kubenswrapper[4783]: E0930 13:53:02.356986 4783 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 13:53:02 crc kubenswrapper[4783]: E0930 13:53:02.357028 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift podName:3da50d95-fee8-4e78-ad46-c2d8ac95adc2 nodeName:}" failed. No retries permitted until 2025-09-30 13:53:03.357014562 +0000 UTC m=+1083.288480869 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift") pod "swift-storage-0" (UID: "3da50d95-fee8-4e78-ad46-c2d8ac95adc2") : configmap "swift-ring-files" not found Sep 30 13:53:03 crc kubenswrapper[4783]: I0930 13:53:03.373401 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:03 crc kubenswrapper[4783]: E0930 13:53:03.373727 4783 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 13:53:03 crc kubenswrapper[4783]: E0930 13:53:03.374115 4783 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 13:53:03 crc kubenswrapper[4783]: E0930 13:53:03.374259 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift podName:3da50d95-fee8-4e78-ad46-c2d8ac95adc2 nodeName:}" failed. No retries permitted until 2025-09-30 13:53:05.374200258 +0000 UTC m=+1085.305666605 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift") pod "swift-storage-0" (UID: "3da50d95-fee8-4e78-ad46-c2d8ac95adc2") : configmap "swift-ring-files" not found Sep 30 13:53:03 crc kubenswrapper[4783]: I0930 13:53:03.427979 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 30 13:53:03 crc kubenswrapper[4783]: I0930 13:53:03.511403 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="2a09ae34-f770-404f-b7ec-1fd3b630bf4c" containerName="galera" probeResult="failure" output=< Sep 30 13:53:03 crc kubenswrapper[4783]: wsrep_local_state_comment (Joined) differs from Synced Sep 30 13:53:03 crc kubenswrapper[4783]: > Sep 30 13:53:03 crc kubenswrapper[4783]: I0930 13:53:03.947100 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86b869995c-xpf2q" podUID="016a6efa-f0f3-404c-8423-d58eda1d7046" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: connect: connection refused" Sep 30 13:53:04 crc kubenswrapper[4783]: I0930 13:53:04.603495 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.405175 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:05 crc kubenswrapper[4783]: E0930 13:53:05.405347 4783 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 13:53:05 crc kubenswrapper[4783]: E0930 13:53:05.405802 4783 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 13:53:05 crc kubenswrapper[4783]: E0930 13:53:05.405892 4783 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift podName:3da50d95-fee8-4e78-ad46-c2d8ac95adc2 nodeName:}" failed. No retries permitted until 2025-09-30 13:53:09.405872944 +0000 UTC m=+1089.337339241 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift") pod "swift-storage-0" (UID: "3da50d95-fee8-4e78-ad46-c2d8ac95adc2") : configmap "swift-ring-files" not found Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.718912 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-j5vlb"] Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.720088 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.726627 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.726888 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.727066 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.732583 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-j5vlb"] Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.757170 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2976d001-3d08-4721-85db-95c0a0de28b8-ring-data-devices\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.757222 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltvqq\" (UniqueName: \"kubernetes.io/projected/2976d001-3d08-4721-85db-95c0a0de28b8-kube-api-access-ltvqq\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.757256 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-combined-ca-bundle\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.757336 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2976d001-3d08-4721-85db-95c0a0de28b8-etc-swift\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.757388 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-swiftconf\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 
13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.757425 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-dispersionconf\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.757462 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2976d001-3d08-4721-85db-95c0a0de28b8-scripts\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.858802 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2976d001-3d08-4721-85db-95c0a0de28b8-etc-swift\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.858851 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-swiftconf\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.858893 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-dispersionconf\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.858942 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2976d001-3d08-4721-85db-95c0a0de28b8-scripts\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.858977 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2976d001-3d08-4721-85db-95c0a0de28b8-ring-data-devices\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.858996 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltvqq\" (UniqueName: \"kubernetes.io/projected/2976d001-3d08-4721-85db-95c0a0de28b8-kube-api-access-ltvqq\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.859012 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-combined-ca-bundle\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.860721 4783 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2976d001-3d08-4721-85db-95c0a0de28b8-etc-swift\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.860812 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2976d001-3d08-4721-85db-95c0a0de28b8-ring-data-devices\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.860871 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2976d001-3d08-4721-85db-95c0a0de28b8-scripts\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.864987 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-dispersionconf\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.866254 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-swiftconf\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.868514 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-combined-ca-bundle\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:05 crc kubenswrapper[4783]: I0930 13:53:05.876635 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltvqq\" (UniqueName: \"kubernetes.io/projected/2976d001-3d08-4721-85db-95c0a0de28b8-kube-api-access-ltvqq\") pod \"swift-ring-rebalance-j5vlb\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.035444 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.373297 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-j5vlb"] Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.579828 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-j5vlb" event={"ID":"2976d001-3d08-4721-85db-95c0a0de28b8","Type":"ContainerStarted","Data":"f83960102295b7c6916eb2caa20b39d6f9f416d9072b62f5fe74d19ae38f96fe"} Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.625190 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.775686 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-dns-svc\") pod \"016a6efa-f0f3-404c-8423-d58eda1d7046\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.775808 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnn4r\" (UniqueName: \"kubernetes.io/projected/016a6efa-f0f3-404c-8423-d58eda1d7046-kube-api-access-nnn4r\") pod \"016a6efa-f0f3-404c-8423-d58eda1d7046\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.775864 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-config\") pod \"016a6efa-f0f3-404c-8423-d58eda1d7046\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.775908 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-ovsdbserver-nb\") pod \"016a6efa-f0f3-404c-8423-d58eda1d7046\" (UID: \"016a6efa-f0f3-404c-8423-d58eda1d7046\") " Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.782095 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/016a6efa-f0f3-404c-8423-d58eda1d7046-kube-api-access-nnn4r" (OuterVolumeSpecName: "kube-api-access-nnn4r") pod "016a6efa-f0f3-404c-8423-d58eda1d7046" (UID: "016a6efa-f0f3-404c-8423-d58eda1d7046"). InnerVolumeSpecName "kube-api-access-nnn4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.813683 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-config" (OuterVolumeSpecName: "config") pod "016a6efa-f0f3-404c-8423-d58eda1d7046" (UID: "016a6efa-f0f3-404c-8423-d58eda1d7046"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.813849 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "016a6efa-f0f3-404c-8423-d58eda1d7046" (UID: "016a6efa-f0f3-404c-8423-d58eda1d7046"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.841092 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "016a6efa-f0f3-404c-8423-d58eda1d7046" (UID: "016a6efa-f0f3-404c-8423-d58eda1d7046"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.877895 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.882514 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.882557 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnn4r\" (UniqueName: \"kubernetes.io/projected/016a6efa-f0f3-404c-8423-d58eda1d7046-kube-api-access-nnn4r\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.882580 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.882803 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/016a6efa-f0f3-404c-8423-d58eda1d7046-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:06 crc kubenswrapper[4783]: I0930 13:53:06.943530 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 30 13:53:07 crc kubenswrapper[4783]: I0930 13:53:07.543541 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 30 13:53:07 crc kubenswrapper[4783]: I0930 13:53:07.599879 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86b869995c-xpf2q" event={"ID":"016a6efa-f0f3-404c-8423-d58eda1d7046","Type":"ContainerDied","Data":"9c773d1ed7a15a9b1d2089add2516a660896cc57b4ad1c2e04a5589ef73ed379"} Sep 30 13:53:07 crc kubenswrapper[4783]: I0930 13:53:07.600263 4783 scope.go:117] "RemoveContainer" containerID="5679bf3e3cc760ea4cad94616d292743348f61aed73c046f0a1f4b6ef8a85068" Sep 30 13:53:07 crc kubenswrapper[4783]: I0930 13:53:07.599936 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86b869995c-xpf2q" Sep 30 13:53:07 crc kubenswrapper[4783]: I0930 13:53:07.603855 4783 generic.go:334] "Generic (PLEG): container finished" podID="6b8e5aea-dccd-4876-8d92-3dab97f8db58" containerID="0eab26db4441a40d22cd716000814292f29d2fa07c4cf7152707175cdc6b30a8" exitCode=0 Sep 30 13:53:07 crc kubenswrapper[4783]: I0930 13:53:07.603985 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" event={"ID":"6b8e5aea-dccd-4876-8d92-3dab97f8db58","Type":"ContainerDied","Data":"0eab26db4441a40d22cd716000814292f29d2fa07c4cf7152707175cdc6b30a8"} Sep 30 13:53:07 crc kubenswrapper[4783]: I0930 13:53:07.659002 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86b869995c-xpf2q"] Sep 30 13:53:07 crc kubenswrapper[4783]: I0930 13:53:07.664544 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86b869995c-xpf2q"] Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.534355 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-gg4cw"] Sep 30 13:53:08 crc kubenswrapper[4783]: E0930 13:53:08.535040 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="016a6efa-f0f3-404c-8423-d58eda1d7046" containerName="init" Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.535061 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="016a6efa-f0f3-404c-8423-d58eda1d7046" containerName="init" Sep 30 13:53:08 crc kubenswrapper[4783]: E0930 13:53:08.535084 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="016a6efa-f0f3-404c-8423-d58eda1d7046" containerName="dnsmasq-dns" Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.535091 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="016a6efa-f0f3-404c-8423-d58eda1d7046" containerName="dnsmasq-dns" Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.535357 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="016a6efa-f0f3-404c-8423-d58eda1d7046" containerName="dnsmasq-dns" Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.535997 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gg4cw" Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.548208 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-gg4cw"] Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.713939 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsjxj\" (UniqueName: \"kubernetes.io/projected/262bb3ed-93d0-4389-a89f-c2b2fe5623e0-kube-api-access-fsjxj\") pod \"keystone-db-create-gg4cw\" (UID: \"262bb3ed-93d0-4389-a89f-c2b2fe5623e0\") " pod="openstack/keystone-db-create-gg4cw" Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.752821 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-slhq8"] Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.753842 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-slhq8" Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.771438 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-slhq8"] Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.816253 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsjxj\" (UniqueName: \"kubernetes.io/projected/262bb3ed-93d0-4389-a89f-c2b2fe5623e0-kube-api-access-fsjxj\") pod \"keystone-db-create-gg4cw\" (UID: \"262bb3ed-93d0-4389-a89f-c2b2fe5623e0\") " pod="openstack/keystone-db-create-gg4cw" Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.839263 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsjxj\" (UniqueName: \"kubernetes.io/projected/262bb3ed-93d0-4389-a89f-c2b2fe5623e0-kube-api-access-fsjxj\") pod \"keystone-db-create-gg4cw\" (UID: \"262bb3ed-93d0-4389-a89f-c2b2fe5623e0\") " pod="openstack/keystone-db-create-gg4cw" Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.852904 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="016a6efa-f0f3-404c-8423-d58eda1d7046" path="/var/lib/kubelet/pods/016a6efa-f0f3-404c-8423-d58eda1d7046/volumes" Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.861918 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gg4cw" Sep 30 13:53:08 crc kubenswrapper[4783]: I0930 13:53:08.917881 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqz6j\" (UniqueName: \"kubernetes.io/projected/359bd4f5-9b93-470e-ab89-d9e05636adf0-kube-api-access-tqz6j\") pod \"placement-db-create-slhq8\" (UID: \"359bd4f5-9b93-470e-ab89-d9e05636adf0\") " pod="openstack/placement-db-create-slhq8" Sep 30 13:53:09 crc kubenswrapper[4783]: I0930 13:53:09.019066 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqz6j\" (UniqueName: \"kubernetes.io/projected/359bd4f5-9b93-470e-ab89-d9e05636adf0-kube-api-access-tqz6j\") pod \"placement-db-create-slhq8\" (UID: \"359bd4f5-9b93-470e-ab89-d9e05636adf0\") " pod="openstack/placement-db-create-slhq8" Sep 30 13:53:09 crc kubenswrapper[4783]: I0930 13:53:09.037567 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqz6j\" (UniqueName: \"kubernetes.io/projected/359bd4f5-9b93-470e-ab89-d9e05636adf0-kube-api-access-tqz6j\") pod \"placement-db-create-slhq8\" (UID: \"359bd4f5-9b93-470e-ab89-d9e05636adf0\") " pod="openstack/placement-db-create-slhq8" Sep 30 13:53:09 crc kubenswrapper[4783]: I0930 13:53:09.073098 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-slhq8" Sep 30 13:53:09 crc kubenswrapper[4783]: I0930 13:53:09.426417 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:09 crc kubenswrapper[4783]: E0930 13:53:09.427681 4783 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 13:53:09 crc kubenswrapper[4783]: E0930 13:53:09.427718 4783 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 13:53:09 crc kubenswrapper[4783]: E0930 13:53:09.427803 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift podName:3da50d95-fee8-4e78-ad46-c2d8ac95adc2 nodeName:}" failed. No retries permitted until 2025-09-30 13:53:17.42777429 +0000 UTC m=+1097.359240637 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift") pod "swift-storage-0" (UID: "3da50d95-fee8-4e78-ad46-c2d8ac95adc2") : configmap "swift-ring-files" not found Sep 30 13:53:09 crc kubenswrapper[4783]: I0930 13:53:09.484159 4783 scope.go:117] "RemoveContainer" containerID="3db826bdcbcc08aa252a9ee56f45d3e9e0a1daec46d3e0b244b460aaf1dc4dff" Sep 30 13:53:09 crc kubenswrapper[4783]: I0930 13:53:09.972875 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 30 13:53:10 crc kubenswrapper[4783]: W0930 13:53:10.087494 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod359bd4f5_9b93_470e_ab89_d9e05636adf0.slice/crio-84ebadfa889a06d1d18ed1b43bc92be11fdc8c89e62d87845a5ef6ceb9cab124 WatchSource:0}: Error finding container 84ebadfa889a06d1d18ed1b43bc92be11fdc8c89e62d87845a5ef6ceb9cab124: Status 404 returned error can't find the container with id 84ebadfa889a06d1d18ed1b43bc92be11fdc8c89e62d87845a5ef6ceb9cab124 Sep 30 13:53:10 crc kubenswrapper[4783]: I0930 13:53:10.091837 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-slhq8"] Sep 30 13:53:10 crc kubenswrapper[4783]: I0930 13:53:10.185229 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-gg4cw"] Sep 30 13:53:10 crc kubenswrapper[4783]: W0930 13:53:10.193788 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod262bb3ed_93d0_4389_a89f_c2b2fe5623e0.slice/crio-bb695fe5b24f1eb9de4c3c262308e566ce7476ded29ab9969c48174ccabd5de6 WatchSource:0}: Error finding container bb695fe5b24f1eb9de4c3c262308e566ce7476ded29ab9969c48174ccabd5de6: Status 404 returned error can't find the container with id bb695fe5b24f1eb9de4c3c262308e566ce7476ded29ab9969c48174ccabd5de6 Sep 30 13:53:10 crc kubenswrapper[4783]: I0930 13:53:10.658688 4783 generic.go:334] "Generic (PLEG): container finished" podID="359bd4f5-9b93-470e-ab89-d9e05636adf0" containerID="21aff4dd95bde5a8670e89f3dd3efd6a09ad9db3e02d379da32cd13f67cabcae" exitCode=0 Sep 30 13:53:10 crc kubenswrapper[4783]: I0930 13:53:10.658772 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/placement-db-create-slhq8" event={"ID":"359bd4f5-9b93-470e-ab89-d9e05636adf0","Type":"ContainerDied","Data":"21aff4dd95bde5a8670e89f3dd3efd6a09ad9db3e02d379da32cd13f67cabcae"} Sep 30 13:53:10 crc kubenswrapper[4783]: I0930 13:53:10.658807 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-slhq8" event={"ID":"359bd4f5-9b93-470e-ab89-d9e05636adf0","Type":"ContainerStarted","Data":"84ebadfa889a06d1d18ed1b43bc92be11fdc8c89e62d87845a5ef6ceb9cab124"} Sep 30 13:53:10 crc kubenswrapper[4783]: I0930 13:53:10.661718 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" event={"ID":"6b8e5aea-dccd-4876-8d92-3dab97f8db58","Type":"ContainerStarted","Data":"8fb3b92cc631f3136e13ad20c5dbf5838c3909467c0aa5722a2fff2dcaa58088"} Sep 30 13:53:10 crc kubenswrapper[4783]: I0930 13:53:10.662788 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:10 crc kubenswrapper[4783]: I0930 13:53:10.672906 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-j5vlb" event={"ID":"2976d001-3d08-4721-85db-95c0a0de28b8","Type":"ContainerStarted","Data":"c1abab52854f97e2baa52627f04d820409556ef44e46042b8cc88ae108210266"} Sep 30 13:53:10 crc kubenswrapper[4783]: I0930 13:53:10.675487 4783 generic.go:334] "Generic (PLEG): container finished" podID="262bb3ed-93d0-4389-a89f-c2b2fe5623e0" containerID="8b08ad557e8661b66aeaeaebf65ee5d1552ababcaf7404348631f3f5ac12cc25" exitCode=0 Sep 30 13:53:10 crc kubenswrapper[4783]: I0930 13:53:10.675524 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gg4cw" event={"ID":"262bb3ed-93d0-4389-a89f-c2b2fe5623e0","Type":"ContainerDied","Data":"8b08ad557e8661b66aeaeaebf65ee5d1552ababcaf7404348631f3f5ac12cc25"} Sep 30 13:53:10 crc kubenswrapper[4783]: I0930 13:53:10.675544 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gg4cw" event={"ID":"262bb3ed-93d0-4389-a89f-c2b2fe5623e0","Type":"ContainerStarted","Data":"bb695fe5b24f1eb9de4c3c262308e566ce7476ded29ab9969c48174ccabd5de6"} Sep 30 13:53:10 crc kubenswrapper[4783]: I0930 13:53:10.720173 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" podStartSLOduration=10.720152412000001 podStartE2EDuration="10.720152412s" podCreationTimestamp="2025-09-30 13:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:53:10.709415568 +0000 UTC m=+1090.640881955" watchObservedRunningTime="2025-09-30 13:53:10.720152412 +0000 UTC m=+1090.651618729" Sep 30 13:53:10 crc kubenswrapper[4783]: I0930 13:53:10.747041 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-j5vlb" podStartSLOduration=2.503772333 podStartE2EDuration="5.747023753s" podCreationTimestamp="2025-09-30 13:53:05 +0000 UTC" firstStartedPulling="2025-09-30 13:53:06.384659931 +0000 UTC m=+1086.316126248" lastFinishedPulling="2025-09-30 13:53:09.627911331 +0000 UTC m=+1089.559377668" observedRunningTime="2025-09-30 13:53:10.744940916 +0000 UTC m=+1090.676407243" watchObservedRunningTime="2025-09-30 13:53:10.747023753 +0000 UTC m=+1090.678490060" Sep 30 13:53:11 crc kubenswrapper[4783]: I0930 13:53:11.685086 4783 generic.go:334] "Generic (PLEG): container finished" podID="b901a1db-0fb0-4d58-be99-fdfd812683e6" 
containerID="7599420c31f154f2821fb28904486c049a6d33032a582e1929d2de130e1a7325" exitCode=0 Sep 30 13:53:11 crc kubenswrapper[4783]: I0930 13:53:11.685129 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b901a1db-0fb0-4d58-be99-fdfd812683e6","Type":"ContainerDied","Data":"7599420c31f154f2821fb28904486c049a6d33032a582e1929d2de130e1a7325"} Sep 30 13:53:11 crc kubenswrapper[4783]: I0930 13:53:11.690512 4783 generic.go:334] "Generic (PLEG): container finished" podID="164c5743-32f5-4347-9c9d-20d28f1f2dce" containerID="ff01f7f8cb2c149281e4623522e0bee054923bab6c79a1fd04884e785e56859b" exitCode=0 Sep 30 13:53:11 crc kubenswrapper[4783]: I0930 13:53:11.690699 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"164c5743-32f5-4347-9c9d-20d28f1f2dce","Type":"ContainerDied","Data":"ff01f7f8cb2c149281e4623522e0bee054923bab6c79a1fd04884e785e56859b"} Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.059058 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gg4cw" Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.137839 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-slhq8" Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.181017 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsjxj\" (UniqueName: \"kubernetes.io/projected/262bb3ed-93d0-4389-a89f-c2b2fe5623e0-kube-api-access-fsjxj\") pod \"262bb3ed-93d0-4389-a89f-c2b2fe5623e0\" (UID: \"262bb3ed-93d0-4389-a89f-c2b2fe5623e0\") " Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.188862 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/262bb3ed-93d0-4389-a89f-c2b2fe5623e0-kube-api-access-fsjxj" (OuterVolumeSpecName: "kube-api-access-fsjxj") pod "262bb3ed-93d0-4389-a89f-c2b2fe5623e0" (UID: "262bb3ed-93d0-4389-a89f-c2b2fe5623e0"). InnerVolumeSpecName "kube-api-access-fsjxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.283179 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqz6j\" (UniqueName: \"kubernetes.io/projected/359bd4f5-9b93-470e-ab89-d9e05636adf0-kube-api-access-tqz6j\") pod \"359bd4f5-9b93-470e-ab89-d9e05636adf0\" (UID: \"359bd4f5-9b93-470e-ab89-d9e05636adf0\") " Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.283937 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsjxj\" (UniqueName: \"kubernetes.io/projected/262bb3ed-93d0-4389-a89f-c2b2fe5623e0-kube-api-access-fsjxj\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.287345 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/359bd4f5-9b93-470e-ab89-d9e05636adf0-kube-api-access-tqz6j" (OuterVolumeSpecName: "kube-api-access-tqz6j") pod "359bd4f5-9b93-470e-ab89-d9e05636adf0" (UID: "359bd4f5-9b93-470e-ab89-d9e05636adf0"). InnerVolumeSpecName "kube-api-access-tqz6j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.385632 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqz6j\" (UniqueName: \"kubernetes.io/projected/359bd4f5-9b93-470e-ab89-d9e05636adf0-kube-api-access-tqz6j\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.703510 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gg4cw" event={"ID":"262bb3ed-93d0-4389-a89f-c2b2fe5623e0","Type":"ContainerDied","Data":"bb695fe5b24f1eb9de4c3c262308e566ce7476ded29ab9969c48174ccabd5de6"} Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.703790 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb695fe5b24f1eb9de4c3c262308e566ce7476ded29ab9969c48174ccabd5de6" Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.703869 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gg4cw" Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.720608 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-slhq8" event={"ID":"359bd4f5-9b93-470e-ab89-d9e05636adf0","Type":"ContainerDied","Data":"84ebadfa889a06d1d18ed1b43bc92be11fdc8c89e62d87845a5ef6ceb9cab124"} Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.720680 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-slhq8" Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.720689 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84ebadfa889a06d1d18ed1b43bc92be11fdc8c89e62d87845a5ef6ceb9cab124" Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.727380 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b901a1db-0fb0-4d58-be99-fdfd812683e6","Type":"ContainerStarted","Data":"2c450bff4273d74a9d160832b6ebe969916f4c399367beb3955bf0e5e42fed9d"} Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.727664 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.729753 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"164c5743-32f5-4347-9c9d-20d28f1f2dce","Type":"ContainerStarted","Data":"fde77ef5d7a9cf8cd4dc2107f4da9a25122e2181ab7101e04370f5e2b4e9c998"} Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.730516 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.760854 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=34.790319715 podStartE2EDuration="49.760838867s" podCreationTimestamp="2025-09-30 13:52:23 +0000 UTC" firstStartedPulling="2025-09-30 13:52:25.518008997 +0000 UTC m=+1045.449475304" lastFinishedPulling="2025-09-30 13:52:40.488528149 +0000 UTC m=+1060.419994456" observedRunningTime="2025-09-30 13:53:12.759092251 +0000 UTC m=+1092.690558558" watchObservedRunningTime="2025-09-30 13:53:12.760838867 +0000 UTC m=+1092.692305174" Sep 30 13:53:12 crc kubenswrapper[4783]: I0930 13:53:12.789789 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=34.596675878 
podStartE2EDuration="48.789772354s" podCreationTimestamp="2025-09-30 13:52:24 +0000 UTC" firstStartedPulling="2025-09-30 13:52:26.308987397 +0000 UTC m=+1046.240453704" lastFinishedPulling="2025-09-30 13:52:40.502083873 +0000 UTC m=+1060.433550180" observedRunningTime="2025-09-30 13:53:12.785667672 +0000 UTC m=+1092.717133999" watchObservedRunningTime="2025-09-30 13:53:12.789772354 +0000 UTC m=+1092.721238661" Sep 30 13:53:14 crc kubenswrapper[4783]: I0930 13:53:14.097343 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-d5qms"] Sep 30 13:53:14 crc kubenswrapper[4783]: E0930 13:53:14.097742 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="262bb3ed-93d0-4389-a89f-c2b2fe5623e0" containerName="mariadb-database-create" Sep 30 13:53:14 crc kubenswrapper[4783]: I0930 13:53:14.097759 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="262bb3ed-93d0-4389-a89f-c2b2fe5623e0" containerName="mariadb-database-create" Sep 30 13:53:14 crc kubenswrapper[4783]: E0930 13:53:14.097780 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="359bd4f5-9b93-470e-ab89-d9e05636adf0" containerName="mariadb-database-create" Sep 30 13:53:14 crc kubenswrapper[4783]: I0930 13:53:14.097789 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="359bd4f5-9b93-470e-ab89-d9e05636adf0" containerName="mariadb-database-create" Sep 30 13:53:14 crc kubenswrapper[4783]: I0930 13:53:14.097998 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="359bd4f5-9b93-470e-ab89-d9e05636adf0" containerName="mariadb-database-create" Sep 30 13:53:14 crc kubenswrapper[4783]: I0930 13:53:14.098031 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="262bb3ed-93d0-4389-a89f-c2b2fe5623e0" containerName="mariadb-database-create" Sep 30 13:53:14 crc kubenswrapper[4783]: I0930 13:53:14.098750 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-d5qms" Sep 30 13:53:14 crc kubenswrapper[4783]: I0930 13:53:14.106329 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-d5qms"] Sep 30 13:53:14 crc kubenswrapper[4783]: I0930 13:53:14.222171 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcsvs\" (UniqueName: \"kubernetes.io/projected/c5499617-8645-4b0b-9b0e-5dbe617afc92-kube-api-access-xcsvs\") pod \"glance-db-create-d5qms\" (UID: \"c5499617-8645-4b0b-9b0e-5dbe617afc92\") " pod="openstack/glance-db-create-d5qms" Sep 30 13:53:14 crc kubenswrapper[4783]: I0930 13:53:14.324513 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcsvs\" (UniqueName: \"kubernetes.io/projected/c5499617-8645-4b0b-9b0e-5dbe617afc92-kube-api-access-xcsvs\") pod \"glance-db-create-d5qms\" (UID: \"c5499617-8645-4b0b-9b0e-5dbe617afc92\") " pod="openstack/glance-db-create-d5qms" Sep 30 13:53:14 crc kubenswrapper[4783]: I0930 13:53:14.347575 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcsvs\" (UniqueName: \"kubernetes.io/projected/c5499617-8645-4b0b-9b0e-5dbe617afc92-kube-api-access-xcsvs\") pod \"glance-db-create-d5qms\" (UID: \"c5499617-8645-4b0b-9b0e-5dbe617afc92\") " pod="openstack/glance-db-create-d5qms" Sep 30 13:53:14 crc kubenswrapper[4783]: I0930 13:53:14.423253 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-d5qms" Sep 30 13:53:14 crc kubenswrapper[4783]: I0930 13:53:14.881947 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-d5qms"] Sep 30 13:53:15 crc kubenswrapper[4783]: I0930 13:53:15.751938 4783 generic.go:334] "Generic (PLEG): container finished" podID="c5499617-8645-4b0b-9b0e-5dbe617afc92" containerID="4a9a7157e02c3b85c335f189160f0f21e4eeb272172ff1581b80ed8d12d5b4b1" exitCode=0 Sep 30 13:53:15 crc kubenswrapper[4783]: I0930 13:53:15.752287 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-d5qms" event={"ID":"c5499617-8645-4b0b-9b0e-5dbe617afc92","Type":"ContainerDied","Data":"4a9a7157e02c3b85c335f189160f0f21e4eeb272172ff1581b80ed8d12d5b4b1"} Sep 30 13:53:15 crc kubenswrapper[4783]: I0930 13:53:15.752315 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-d5qms" event={"ID":"c5499617-8645-4b0b-9b0e-5dbe617afc92","Type":"ContainerStarted","Data":"1f90c4ebb4dde98429aeb96cc6e8b4056d6f413b054cf1b5a438ec15793de90b"} Sep 30 13:53:15 crc kubenswrapper[4783]: I0930 13:53:15.876389 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:53:15 crc kubenswrapper[4783]: I0930 13:53:15.963316 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d86d68bf7-lxqvg"] Sep 30 13:53:15 crc kubenswrapper[4783]: I0930 13:53:15.964573 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" podUID="3dc2c97a-e1b4-46e9-87bc-3b73f22f1162" containerName="dnsmasq-dns" containerID="cri-o://054a40ae62c6180f73908b28d7e23279564aa631e8bcdb29a4aa0661d1eb7515" gracePeriod=10 Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.591823 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.695431 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-dns-svc\") pod \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.696121 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-ovsdbserver-sb\") pod \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.696173 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-ovsdbserver-nb\") pod \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.696294 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-config\") pod \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.696368 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrzgg\" (UniqueName: \"kubernetes.io/projected/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-kube-api-access-hrzgg\") pod \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\" (UID: \"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162\") " Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.702392 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-kube-api-access-hrzgg" (OuterVolumeSpecName: "kube-api-access-hrzgg") pod "3dc2c97a-e1b4-46e9-87bc-3b73f22f1162" (UID: "3dc2c97a-e1b4-46e9-87bc-3b73f22f1162"). InnerVolumeSpecName "kube-api-access-hrzgg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.738130 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3dc2c97a-e1b4-46e9-87bc-3b73f22f1162" (UID: "3dc2c97a-e1b4-46e9-87bc-3b73f22f1162"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.742433 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3dc2c97a-e1b4-46e9-87bc-3b73f22f1162" (UID: "3dc2c97a-e1b4-46e9-87bc-3b73f22f1162"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.745726 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3dc2c97a-e1b4-46e9-87bc-3b73f22f1162" (UID: "3dc2c97a-e1b4-46e9-87bc-3b73f22f1162"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.757415 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-config" (OuterVolumeSpecName: "config") pod "3dc2c97a-e1b4-46e9-87bc-3b73f22f1162" (UID: "3dc2c97a-e1b4-46e9-87bc-3b73f22f1162"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.763869 4783 generic.go:334] "Generic (PLEG): container finished" podID="2976d001-3d08-4721-85db-95c0a0de28b8" containerID="c1abab52854f97e2baa52627f04d820409556ef44e46042b8cc88ae108210266" exitCode=0 Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.763950 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-j5vlb" event={"ID":"2976d001-3d08-4721-85db-95c0a0de28b8","Type":"ContainerDied","Data":"c1abab52854f97e2baa52627f04d820409556ef44e46042b8cc88ae108210266"} Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.766729 4783 generic.go:334] "Generic (PLEG): container finished" podID="3dc2c97a-e1b4-46e9-87bc-3b73f22f1162" containerID="054a40ae62c6180f73908b28d7e23279564aa631e8bcdb29a4aa0661d1eb7515" exitCode=0 Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.766793 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.766847 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" event={"ID":"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162","Type":"ContainerDied","Data":"054a40ae62c6180f73908b28d7e23279564aa631e8bcdb29a4aa0661d1eb7515"} Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.767442 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d86d68bf7-lxqvg" event={"ID":"3dc2c97a-e1b4-46e9-87bc-3b73f22f1162","Type":"ContainerDied","Data":"e4655cf2c727a47095be9b9c4cebd1b218d88d8e987b5949f07ff4cdd691587b"} Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.767514 4783 scope.go:117] "RemoveContainer" containerID="054a40ae62c6180f73908b28d7e23279564aa631e8bcdb29a4aa0661d1eb7515" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.799137 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrzgg\" (UniqueName: \"kubernetes.io/projected/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-kube-api-access-hrzgg\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.799449 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.799460 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.799468 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.799477 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.814902 4783 scope.go:117] "RemoveContainer" containerID="0c9b0fe61bb0bd2d3665af492c41a442c14fa62849558fc26ce9b1d675075ebf" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.871938 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d86d68bf7-lxqvg"] Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.877701 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d86d68bf7-lxqvg"] Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.893706 4783 scope.go:117] "RemoveContainer" containerID="054a40ae62c6180f73908b28d7e23279564aa631e8bcdb29a4aa0661d1eb7515" Sep 30 13:53:16 crc kubenswrapper[4783]: E0930 13:53:16.894101 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"054a40ae62c6180f73908b28d7e23279564aa631e8bcdb29a4aa0661d1eb7515\": container with ID starting with 054a40ae62c6180f73908b28d7e23279564aa631e8bcdb29a4aa0661d1eb7515 not found: ID does not exist" containerID="054a40ae62c6180f73908b28d7e23279564aa631e8bcdb29a4aa0661d1eb7515" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.894130 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"054a40ae62c6180f73908b28d7e23279564aa631e8bcdb29a4aa0661d1eb7515"} err="failed to get container status \"054a40ae62c6180f73908b28d7e23279564aa631e8bcdb29a4aa0661d1eb7515\": rpc error: code = NotFound desc = could not find container \"054a40ae62c6180f73908b28d7e23279564aa631e8bcdb29a4aa0661d1eb7515\": container with ID starting with 054a40ae62c6180f73908b28d7e23279564aa631e8bcdb29a4aa0661d1eb7515 not found: ID does not exist" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.894148 4783 scope.go:117] "RemoveContainer" containerID="0c9b0fe61bb0bd2d3665af492c41a442c14fa62849558fc26ce9b1d675075ebf" Sep 30 13:53:16 crc kubenswrapper[4783]: E0930 13:53:16.894360 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c9b0fe61bb0bd2d3665af492c41a442c14fa62849558fc26ce9b1d675075ebf\": container with ID starting with 0c9b0fe61bb0bd2d3665af492c41a442c14fa62849558fc26ce9b1d675075ebf not found: ID does not exist" containerID="0c9b0fe61bb0bd2d3665af492c41a442c14fa62849558fc26ce9b1d675075ebf" Sep 30 13:53:16 crc kubenswrapper[4783]: I0930 13:53:16.894380 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c9b0fe61bb0bd2d3665af492c41a442c14fa62849558fc26ce9b1d675075ebf"} err="failed to get container status \"0c9b0fe61bb0bd2d3665af492c41a442c14fa62849558fc26ce9b1d675075ebf\": rpc error: code = NotFound desc = could not find container \"0c9b0fe61bb0bd2d3665af492c41a442c14fa62849558fc26ce9b1d675075ebf\": container with ID starting with 0c9b0fe61bb0bd2d3665af492c41a442c14fa62849558fc26ce9b1d675075ebf not found: ID does not exist" Sep 30 13:53:17 crc kubenswrapper[4783]: I0930 13:53:17.036422 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-d5qms" Sep 30 13:53:17 crc kubenswrapper[4783]: I0930 13:53:17.206427 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcsvs\" (UniqueName: \"kubernetes.io/projected/c5499617-8645-4b0b-9b0e-5dbe617afc92-kube-api-access-xcsvs\") pod \"c5499617-8645-4b0b-9b0e-5dbe617afc92\" (UID: \"c5499617-8645-4b0b-9b0e-5dbe617afc92\") " Sep 30 13:53:17 crc kubenswrapper[4783]: I0930 13:53:17.212352 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5499617-8645-4b0b-9b0e-5dbe617afc92-kube-api-access-xcsvs" (OuterVolumeSpecName: "kube-api-access-xcsvs") pod "c5499617-8645-4b0b-9b0e-5dbe617afc92" (UID: "c5499617-8645-4b0b-9b0e-5dbe617afc92"). InnerVolumeSpecName "kube-api-access-xcsvs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:17 crc kubenswrapper[4783]: I0930 13:53:17.308783 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcsvs\" (UniqueName: \"kubernetes.io/projected/c5499617-8645-4b0b-9b0e-5dbe617afc92-kube-api-access-xcsvs\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:17 crc kubenswrapper[4783]: I0930 13:53:17.511838 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:17 crc kubenswrapper[4783]: I0930 13:53:17.518460 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift\") pod \"swift-storage-0\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") " pod="openstack/swift-storage-0" Sep 30 13:53:17 crc kubenswrapper[4783]: I0930 13:53:17.629536 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 30 13:53:17 crc kubenswrapper[4783]: I0930 13:53:17.781049 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-d5qms" Sep 30 13:53:17 crc kubenswrapper[4783]: I0930 13:53:17.781042 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-d5qms" event={"ID":"c5499617-8645-4b0b-9b0e-5dbe617afc92","Type":"ContainerDied","Data":"1f90c4ebb4dde98429aeb96cc6e8b4056d6f413b054cf1b5a438ec15793de90b"} Sep 30 13:53:17 crc kubenswrapper[4783]: I0930 13:53:17.781663 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f90c4ebb4dde98429aeb96cc6e8b4056d6f413b054cf1b5a438ec15793de90b" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.053302 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.190884 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.228388 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2976d001-3d08-4721-85db-95c0a0de28b8-scripts\") pod \"2976d001-3d08-4721-85db-95c0a0de28b8\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.228429 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-swiftconf\") pod \"2976d001-3d08-4721-85db-95c0a0de28b8\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.228463 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltvqq\" (UniqueName: \"kubernetes.io/projected/2976d001-3d08-4721-85db-95c0a0de28b8-kube-api-access-ltvqq\") pod \"2976d001-3d08-4721-85db-95c0a0de28b8\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.228542 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-combined-ca-bundle\") pod \"2976d001-3d08-4721-85db-95c0a0de28b8\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.229087 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2976d001-3d08-4721-85db-95c0a0de28b8-etc-swift\") pod \"2976d001-3d08-4721-85db-95c0a0de28b8\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.229150 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2976d001-3d08-4721-85db-95c0a0de28b8-ring-data-devices\") pod \"2976d001-3d08-4721-85db-95c0a0de28b8\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.229247 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-dispersionconf\") pod \"2976d001-3d08-4721-85db-95c0a0de28b8\" (UID: \"2976d001-3d08-4721-85db-95c0a0de28b8\") " Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.230073 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2976d001-3d08-4721-85db-95c0a0de28b8-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2976d001-3d08-4721-85db-95c0a0de28b8" (UID: "2976d001-3d08-4721-85db-95c0a0de28b8"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.230084 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2976d001-3d08-4721-85db-95c0a0de28b8-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "2976d001-3d08-4721-85db-95c0a0de28b8" (UID: "2976d001-3d08-4721-85db-95c0a0de28b8"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.234378 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2976d001-3d08-4721-85db-95c0a0de28b8-kube-api-access-ltvqq" (OuterVolumeSpecName: "kube-api-access-ltvqq") pod "2976d001-3d08-4721-85db-95c0a0de28b8" (UID: "2976d001-3d08-4721-85db-95c0a0de28b8"). InnerVolumeSpecName "kube-api-access-ltvqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.234871 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "2976d001-3d08-4721-85db-95c0a0de28b8" (UID: "2976d001-3d08-4721-85db-95c0a0de28b8"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.252020 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2976d001-3d08-4721-85db-95c0a0de28b8-scripts" (OuterVolumeSpecName: "scripts") pod "2976d001-3d08-4721-85db-95c0a0de28b8" (UID: "2976d001-3d08-4721-85db-95c0a0de28b8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.253393 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "2976d001-3d08-4721-85db-95c0a0de28b8" (UID: "2976d001-3d08-4721-85db-95c0a0de28b8"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.257864 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2976d001-3d08-4721-85db-95c0a0de28b8" (UID: "2976d001-3d08-4721-85db-95c0a0de28b8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.330310 4783 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-dispersionconf\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.330342 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2976d001-3d08-4721-85db-95c0a0de28b8-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.330352 4783 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-swiftconf\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.330363 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltvqq\" (UniqueName: \"kubernetes.io/projected/2976d001-3d08-4721-85db-95c0a0de28b8-kube-api-access-ltvqq\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.330376 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2976d001-3d08-4721-85db-95c0a0de28b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.330385 4783 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2976d001-3d08-4721-85db-95c0a0de28b8-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.330392 4783 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2976d001-3d08-4721-85db-95c0a0de28b8-ring-data-devices\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.597501 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-b208-account-create-xqdqt"] Sep 30 13:53:18 crc kubenswrapper[4783]: E0930 13:53:18.597829 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dc2c97a-e1b4-46e9-87bc-3b73f22f1162" containerName="dnsmasq-dns" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.597843 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dc2c97a-e1b4-46e9-87bc-3b73f22f1162" containerName="dnsmasq-dns" Sep 30 13:53:18 crc kubenswrapper[4783]: E0930 13:53:18.597858 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dc2c97a-e1b4-46e9-87bc-3b73f22f1162" containerName="init" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.597865 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dc2c97a-e1b4-46e9-87bc-3b73f22f1162" containerName="init" Sep 30 13:53:18 crc kubenswrapper[4783]: E0930 13:53:18.597876 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2976d001-3d08-4721-85db-95c0a0de28b8" containerName="swift-ring-rebalance" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.597883 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2976d001-3d08-4721-85db-95c0a0de28b8" containerName="swift-ring-rebalance" Sep 30 13:53:18 crc kubenswrapper[4783]: E0930 13:53:18.597893 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5499617-8645-4b0b-9b0e-5dbe617afc92" containerName="mariadb-database-create" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.597898 4783 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="c5499617-8645-4b0b-9b0e-5dbe617afc92" containerName="mariadb-database-create" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.598065 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2976d001-3d08-4721-85db-95c0a0de28b8" containerName="swift-ring-rebalance" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.598079 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5499617-8645-4b0b-9b0e-5dbe617afc92" containerName="mariadb-database-create" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.598088 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dc2c97a-e1b4-46e9-87bc-3b73f22f1162" containerName="dnsmasq-dns" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.598586 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b208-account-create-xqdqt" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.603992 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.605458 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-b208-account-create-xqdqt"] Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.633871 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49zlj\" (UniqueName: \"kubernetes.io/projected/b12cead9-2793-46b5-8654-420df6d90f01-kube-api-access-49zlj\") pod \"keystone-b208-account-create-xqdqt\" (UID: \"b12cead9-2793-46b5-8654-420df6d90f01\") " pod="openstack/keystone-b208-account-create-xqdqt" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.735632 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49zlj\" (UniqueName: \"kubernetes.io/projected/b12cead9-2793-46b5-8654-420df6d90f01-kube-api-access-49zlj\") pod \"keystone-b208-account-create-xqdqt\" (UID: \"b12cead9-2793-46b5-8654-420df6d90f01\") " pod="openstack/keystone-b208-account-create-xqdqt" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.751806 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49zlj\" (UniqueName: \"kubernetes.io/projected/b12cead9-2793-46b5-8654-420df6d90f01-kube-api-access-49zlj\") pod \"keystone-b208-account-create-xqdqt\" (UID: \"b12cead9-2793-46b5-8654-420df6d90f01\") " pod="openstack/keystone-b208-account-create-xqdqt" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.790886 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-j5vlb" event={"ID":"2976d001-3d08-4721-85db-95c0a0de28b8","Type":"ContainerDied","Data":"f83960102295b7c6916eb2caa20b39d6f9f416d9072b62f5fe74d19ae38f96fe"} Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.790915 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-j5vlb" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.790927 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f83960102295b7c6916eb2caa20b39d6f9f416d9072b62f5fe74d19ae38f96fe" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.793160 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"99da05eaacc7b0a5f19d53d07393eef42f90815ddbdfb308b4ac60fcc78a3217"} Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.874144 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dc2c97a-e1b4-46e9-87bc-3b73f22f1162" path="/var/lib/kubelet/pods/3dc2c97a-e1b4-46e9-87bc-3b73f22f1162/volumes" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.899393 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-92eb-account-create-pr6m9"] Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.900730 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-92eb-account-create-pr6m9" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.906655 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-92eb-account-create-pr6m9"] Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.944413 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.944592 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b208-account-create-xqdqt" Sep 30 13:53:18 crc kubenswrapper[4783]: I0930 13:53:18.946278 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65rcb\" (UniqueName: \"kubernetes.io/projected/50128352-8027-4b7a-af43-18310b14ca16-kube-api-access-65rcb\") pod \"placement-92eb-account-create-pr6m9\" (UID: \"50128352-8027-4b7a-af43-18310b14ca16\") " pod="openstack/placement-92eb-account-create-pr6m9" Sep 30 13:53:19 crc kubenswrapper[4783]: I0930 13:53:19.047621 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65rcb\" (UniqueName: \"kubernetes.io/projected/50128352-8027-4b7a-af43-18310b14ca16-kube-api-access-65rcb\") pod \"placement-92eb-account-create-pr6m9\" (UID: \"50128352-8027-4b7a-af43-18310b14ca16\") " pod="openstack/placement-92eb-account-create-pr6m9" Sep 30 13:53:19 crc kubenswrapper[4783]: I0930 13:53:19.073635 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65rcb\" (UniqueName: \"kubernetes.io/projected/50128352-8027-4b7a-af43-18310b14ca16-kube-api-access-65rcb\") pod \"placement-92eb-account-create-pr6m9\" (UID: \"50128352-8027-4b7a-af43-18310b14ca16\") " pod="openstack/placement-92eb-account-create-pr6m9" Sep 30 13:53:19 crc kubenswrapper[4783]: I0930 13:53:19.263061 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-92eb-account-create-pr6m9" Sep 30 13:53:19 crc kubenswrapper[4783]: I0930 13:53:19.431143 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-b208-account-create-xqdqt"] Sep 30 13:53:19 crc kubenswrapper[4783]: I0930 13:53:19.435289 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-th6r6" podUID="8bc852c2-c59b-4b84-bbfc-c8b62354c66d" containerName="ovn-controller" probeResult="failure" output=< Sep 30 13:53:19 crc kubenswrapper[4783]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 30 13:53:19 crc kubenswrapper[4783]: > Sep 30 13:53:19 crc kubenswrapper[4783]: W0930 13:53:19.448671 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb12cead9_2793_46b5_8654_420df6d90f01.slice/crio-0c98daab4af2b2e5592bbb06c48f5e13031571bdcd9ae3449a8c8369a20e4874 WatchSource:0}: Error finding container 0c98daab4af2b2e5592bbb06c48f5e13031571bdcd9ae3449a8c8369a20e4874: Status 404 returned error can't find the container with id 0c98daab4af2b2e5592bbb06c48f5e13031571bdcd9ae3449a8c8369a20e4874 Sep 30 13:53:19 crc kubenswrapper[4783]: I0930 13:53:19.805848 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b208-account-create-xqdqt" event={"ID":"b12cead9-2793-46b5-8654-420df6d90f01","Type":"ContainerStarted","Data":"12c7b943e3e515968317bb4e2b36cb7ec137674e1291a8c6474038902d7af06c"} Sep 30 13:53:19 crc kubenswrapper[4783]: I0930 13:53:19.806250 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b208-account-create-xqdqt" event={"ID":"b12cead9-2793-46b5-8654-420df6d90f01","Type":"ContainerStarted","Data":"0c98daab4af2b2e5592bbb06c48f5e13031571bdcd9ae3449a8c8369a20e4874"} Sep 30 13:53:19 crc kubenswrapper[4783]: I0930 13:53:19.810829 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"19f8581fd57609c6eca4ec015f369dde264c61b10ce59f14103fa1cc03844e73"} Sep 30 13:53:19 crc kubenswrapper[4783]: I0930 13:53:19.853800 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-92eb-account-create-pr6m9"] Sep 30 13:53:19 crc kubenswrapper[4783]: W0930 13:53:19.868980 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50128352_8027_4b7a_af43_18310b14ca16.slice/crio-c20bcaa25b5ab4dd2cb4db84e6ac159947ff00c6a152238a64a92369e3dd6386 WatchSource:0}: Error finding container c20bcaa25b5ab4dd2cb4db84e6ac159947ff00c6a152238a64a92369e3dd6386: Status 404 returned error can't find the container with id c20bcaa25b5ab4dd2cb4db84e6ac159947ff00c6a152238a64a92369e3dd6386 Sep 30 13:53:20 crc kubenswrapper[4783]: I0930 13:53:20.821343 4783 generic.go:334] "Generic (PLEG): container finished" podID="50128352-8027-4b7a-af43-18310b14ca16" containerID="c7f3a38555bdabc4adc448d88761adfa98cc0de755f8e23c6c57230e33e5157b" exitCode=0 Sep 30 13:53:20 crc kubenswrapper[4783]: I0930 13:53:20.821390 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-92eb-account-create-pr6m9" event={"ID":"50128352-8027-4b7a-af43-18310b14ca16","Type":"ContainerDied","Data":"c7f3a38555bdabc4adc448d88761adfa98cc0de755f8e23c6c57230e33e5157b"} Sep 30 13:53:20 crc kubenswrapper[4783]: I0930 13:53:20.821856 4783 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/placement-92eb-account-create-pr6m9" event={"ID":"50128352-8027-4b7a-af43-18310b14ca16","Type":"ContainerStarted","Data":"c20bcaa25b5ab4dd2cb4db84e6ac159947ff00c6a152238a64a92369e3dd6386"} Sep 30 13:53:20 crc kubenswrapper[4783]: I0930 13:53:20.824879 4783 generic.go:334] "Generic (PLEG): container finished" podID="b12cead9-2793-46b5-8654-420df6d90f01" containerID="12c7b943e3e515968317bb4e2b36cb7ec137674e1291a8c6474038902d7af06c" exitCode=0 Sep 30 13:53:20 crc kubenswrapper[4783]: I0930 13:53:20.824956 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b208-account-create-xqdqt" event={"ID":"b12cead9-2793-46b5-8654-420df6d90f01","Type":"ContainerDied","Data":"12c7b943e3e515968317bb4e2b36cb7ec137674e1291a8c6474038902d7af06c"} Sep 30 13:53:20 crc kubenswrapper[4783]: I0930 13:53:20.828639 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"8aec62a44e6d0e2bf5d9c89e16252de35b71ae052ea1691ca721df50cb2cd898"} Sep 30 13:53:20 crc kubenswrapper[4783]: I0930 13:53:20.828692 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"e78afbdd94a9616ec2021e98bb1be4fbc47a48f38b3c103a4fcefb64434fd5b1"} Sep 30 13:53:21 crc kubenswrapper[4783]: I0930 13:53:21.992919 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b208-account-create-xqdqt" Sep 30 13:53:22 crc kubenswrapper[4783]: I0930 13:53:22.017270 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49zlj\" (UniqueName: \"kubernetes.io/projected/b12cead9-2793-46b5-8654-420df6d90f01-kube-api-access-49zlj\") pod \"b12cead9-2793-46b5-8654-420df6d90f01\" (UID: \"b12cead9-2793-46b5-8654-420df6d90f01\") " Sep 30 13:53:22 crc kubenswrapper[4783]: I0930 13:53:22.026989 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b12cead9-2793-46b5-8654-420df6d90f01-kube-api-access-49zlj" (OuterVolumeSpecName: "kube-api-access-49zlj") pod "b12cead9-2793-46b5-8654-420df6d90f01" (UID: "b12cead9-2793-46b5-8654-420df6d90f01"). InnerVolumeSpecName "kube-api-access-49zlj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:22 crc kubenswrapper[4783]: I0930 13:53:22.119354 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49zlj\" (UniqueName: \"kubernetes.io/projected/b12cead9-2793-46b5-8654-420df6d90f01-kube-api-access-49zlj\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:22 crc kubenswrapper[4783]: I0930 13:53:22.246159 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-92eb-account-create-pr6m9" Sep 30 13:53:22 crc kubenswrapper[4783]: I0930 13:53:22.321514 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65rcb\" (UniqueName: \"kubernetes.io/projected/50128352-8027-4b7a-af43-18310b14ca16-kube-api-access-65rcb\") pod \"50128352-8027-4b7a-af43-18310b14ca16\" (UID: \"50128352-8027-4b7a-af43-18310b14ca16\") " Sep 30 13:53:22 crc kubenswrapper[4783]: I0930 13:53:22.324576 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50128352-8027-4b7a-af43-18310b14ca16-kube-api-access-65rcb" (OuterVolumeSpecName: "kube-api-access-65rcb") pod "50128352-8027-4b7a-af43-18310b14ca16" (UID: "50128352-8027-4b7a-af43-18310b14ca16"). InnerVolumeSpecName "kube-api-access-65rcb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:22 crc kubenswrapper[4783]: I0930 13:53:22.423675 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65rcb\" (UniqueName: \"kubernetes.io/projected/50128352-8027-4b7a-af43-18310b14ca16-kube-api-access-65rcb\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:22 crc kubenswrapper[4783]: I0930 13:53:22.853085 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-92eb-account-create-pr6m9" Sep 30 13:53:22 crc kubenswrapper[4783]: I0930 13:53:22.856070 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b208-account-create-xqdqt" Sep 30 13:53:22 crc kubenswrapper[4783]: I0930 13:53:22.859406 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-92eb-account-create-pr6m9" event={"ID":"50128352-8027-4b7a-af43-18310b14ca16","Type":"ContainerDied","Data":"c20bcaa25b5ab4dd2cb4db84e6ac159947ff00c6a152238a64a92369e3dd6386"} Sep 30 13:53:22 crc kubenswrapper[4783]: I0930 13:53:22.859464 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c20bcaa25b5ab4dd2cb4db84e6ac159947ff00c6a152238a64a92369e3dd6386" Sep 30 13:53:22 crc kubenswrapper[4783]: I0930 13:53:22.859485 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b208-account-create-xqdqt" event={"ID":"b12cead9-2793-46b5-8654-420df6d90f01","Type":"ContainerDied","Data":"0c98daab4af2b2e5592bbb06c48f5e13031571bdcd9ae3449a8c8369a20e4874"} Sep 30 13:53:22 crc kubenswrapper[4783]: I0930 13:53:22.859508 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c98daab4af2b2e5592bbb06c48f5e13031571bdcd9ae3449a8c8369a20e4874" Sep 30 13:53:22 crc kubenswrapper[4783]: I0930 13:53:22.861635 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"bd7cfdf6e1722a5178a727be64336e94d492136884d482d78c9458aafb01c3e3"} Sep 30 13:53:23 crc kubenswrapper[4783]: I0930 13:53:23.875332 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"5e62b2afddb1cd79f42408968b4363c8781a372ab3e53833b770416cab3087b7"} Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.233354 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-e8d3-account-create-whrv6"] Sep 30 13:53:24 crc kubenswrapper[4783]: E0930 13:53:24.233657 4783 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="b12cead9-2793-46b5-8654-420df6d90f01" containerName="mariadb-account-create" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.233673 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="b12cead9-2793-46b5-8654-420df6d90f01" containerName="mariadb-account-create" Sep 30 13:53:24 crc kubenswrapper[4783]: E0930 13:53:24.233684 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50128352-8027-4b7a-af43-18310b14ca16" containerName="mariadb-account-create" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.233691 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="50128352-8027-4b7a-af43-18310b14ca16" containerName="mariadb-account-create" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.233862 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="b12cead9-2793-46b5-8654-420df6d90f01" containerName="mariadb-account-create" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.233889 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="50128352-8027-4b7a-af43-18310b14ca16" containerName="mariadb-account-create" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.234406 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-e8d3-account-create-whrv6" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.237080 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.251299 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-e8d3-account-create-whrv6"] Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.254504 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvppl\" (UniqueName: \"kubernetes.io/projected/c2c0f88a-3106-4f9e-b41d-d9fa4542a24f-kube-api-access-mvppl\") pod \"glance-e8d3-account-create-whrv6\" (UID: \"c2c0f88a-3106-4f9e-b41d-d9fa4542a24f\") " pod="openstack/glance-e8d3-account-create-whrv6" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.358333 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvppl\" (UniqueName: \"kubernetes.io/projected/c2c0f88a-3106-4f9e-b41d-d9fa4542a24f-kube-api-access-mvppl\") pod \"glance-e8d3-account-create-whrv6\" (UID: \"c2c0f88a-3106-4f9e-b41d-d9fa4542a24f\") " pod="openstack/glance-e8d3-account-create-whrv6" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.377538 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvppl\" (UniqueName: \"kubernetes.io/projected/c2c0f88a-3106-4f9e-b41d-d9fa4542a24f-kube-api-access-mvppl\") pod \"glance-e8d3-account-create-whrv6\" (UID: \"c2c0f88a-3106-4f9e-b41d-d9fa4542a24f\") " pod="openstack/glance-e8d3-account-create-whrv6" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.435528 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-th6r6" podUID="8bc852c2-c59b-4b84-bbfc-c8b62354c66d" containerName="ovn-controller" probeResult="failure" output=< Sep 30 13:53:24 crc kubenswrapper[4783]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 30 13:53:24 crc kubenswrapper[4783]: > Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.493933 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 
13:53:24.496338 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.561691 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-e8d3-account-create-whrv6" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.754592 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-th6r6-config-ztxzf"] Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.755927 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.758192 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.782083 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-th6r6-config-ztxzf"] Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.869587 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-run\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.869639 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-log-ovn\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.869695 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzvk7\" (UniqueName: \"kubernetes.io/projected/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-kube-api-access-zzvk7\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.869722 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-scripts\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.869737 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-run-ovn\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.869783 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-additional-scripts\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 
13:53:24.891749 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"252f0904e64a3d0faf4018536bd7548f2c58c560fdd89b8a833d73b47bb1648c"} Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.891785 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"84c1b4b25db238e614e22d807ca489645bb6aae387ddda2ee411cb9193dadcc3"} Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.891796 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"b68136dd9fc59706f3378836d528857df1eec12a03161416d9087d37a2d7d285"} Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.923442 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.973038 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-run\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.973099 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-log-ovn\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.973152 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzvk7\" (UniqueName: \"kubernetes.io/projected/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-kube-api-access-zzvk7\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.973183 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-scripts\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.973203 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-run-ovn\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.973258 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-additional-scripts\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.974085 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-additional-scripts\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.974356 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-run\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.974408 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-log-ovn\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.979510 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-scripts\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:24 crc kubenswrapper[4783]: I0930 13:53:24.979721 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-run-ovn\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:25 crc kubenswrapper[4783]: I0930 13:53:25.000110 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzvk7\" (UniqueName: \"kubernetes.io/projected/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-kube-api-access-zzvk7\") pod \"ovn-controller-th6r6-config-ztxzf\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:25 crc kubenswrapper[4783]: I0930 13:53:25.041577 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-e8d3-account-create-whrv6"] Sep 30 13:53:25 crc kubenswrapper[4783]: W0930 13:53:25.046418 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2c0f88a_3106_4f9e_b41d_d9fa4542a24f.slice/crio-6b821f2785d67974f24213d0f701e1a74faa3bf8f025cfeb38394fff6f4284af WatchSource:0}: Error finding container 6b821f2785d67974f24213d0f701e1a74faa3bf8f025cfeb38394fff6f4284af: Status 404 returned error can't find the container with id 6b821f2785d67974f24213d0f701e1a74faa3bf8f025cfeb38394fff6f4284af Sep 30 13:53:25 crc kubenswrapper[4783]: I0930 13:53:25.082949 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:25 crc kubenswrapper[4783]: I0930 13:53:25.534061 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-th6r6-config-ztxzf"] Sep 30 13:53:25 crc kubenswrapper[4783]: W0930 13:53:25.547109 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3f5126d_e5a8_4464_b461_7ccfe5cf329b.slice/crio-37f4053b1152da95612f396c9acd99d937ca77ae96e70d12e0771e536f70393e WatchSource:0}: Error finding container 37f4053b1152da95612f396c9acd99d937ca77ae96e70d12e0771e536f70393e: Status 404 returned error can't find the container with id 37f4053b1152da95612f396c9acd99d937ca77ae96e70d12e0771e536f70393e Sep 30 13:53:25 crc kubenswrapper[4783]: I0930 13:53:25.805930 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 13:53:25 crc kubenswrapper[4783]: I0930 13:53:25.903875 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-th6r6-config-ztxzf" event={"ID":"a3f5126d-e5a8-4464-b461-7ccfe5cf329b","Type":"ContainerStarted","Data":"37f4053b1152da95612f396c9acd99d937ca77ae96e70d12e0771e536f70393e"} Sep 30 13:53:25 crc kubenswrapper[4783]: I0930 13:53:25.905145 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-e8d3-account-create-whrv6" event={"ID":"c2c0f88a-3106-4f9e-b41d-d9fa4542a24f","Type":"ContainerStarted","Data":"1c45e758b3738d7a28ed877e2f4c5febbca063c9419d9656e6d78dcf68514e8c"} Sep 30 13:53:25 crc kubenswrapper[4783]: I0930 13:53:25.905192 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-e8d3-account-create-whrv6" event={"ID":"c2c0f88a-3106-4f9e-b41d-d9fa4542a24f","Type":"ContainerStarted","Data":"6b821f2785d67974f24213d0f701e1a74faa3bf8f025cfeb38394fff6f4284af"} Sep 30 13:53:25 crc kubenswrapper[4783]: I0930 13:53:25.929900 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-e8d3-account-create-whrv6" podStartSLOduration=1.929880077 podStartE2EDuration="1.929880077s" podCreationTimestamp="2025-09-30 13:53:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:53:25.923346669 +0000 UTC m=+1105.854812976" watchObservedRunningTime="2025-09-30 13:53:25.929880077 +0000 UTC m=+1105.861346404" Sep 30 13:53:26 crc kubenswrapper[4783]: I0930 13:53:26.765384 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-bs8sx"] Sep 30 13:53:26 crc kubenswrapper[4783]: I0930 13:53:26.766734 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bs8sx" Sep 30 13:53:26 crc kubenswrapper[4783]: I0930 13:53:26.783101 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-bs8sx"] Sep 30 13:53:26 crc kubenswrapper[4783]: I0930 13:53:26.884839 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-sctg2"] Sep 30 13:53:26 crc kubenswrapper[4783]: I0930 13:53:26.885757 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-sctg2" Sep 30 13:53:26 crc kubenswrapper[4783]: I0930 13:53:26.908968 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-sctg2"] Sep 30 13:53:26 crc kubenswrapper[4783]: I0930 13:53:26.913887 4783 generic.go:334] "Generic (PLEG): container finished" podID="c2c0f88a-3106-4f9e-b41d-d9fa4542a24f" containerID="1c45e758b3738d7a28ed877e2f4c5febbca063c9419d9656e6d78dcf68514e8c" exitCode=0 Sep 30 13:53:26 crc kubenswrapper[4783]: I0930 13:53:26.913974 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-e8d3-account-create-whrv6" event={"ID":"c2c0f88a-3106-4f9e-b41d-d9fa4542a24f","Type":"ContainerDied","Data":"1c45e758b3738d7a28ed877e2f4c5febbca063c9419d9656e6d78dcf68514e8c"} Sep 30 13:53:26 crc kubenswrapper[4783]: I0930 13:53:26.915793 4783 generic.go:334] "Generic (PLEG): container finished" podID="a3f5126d-e5a8-4464-b461-7ccfe5cf329b" containerID="d9380ce9afc2c1b59bb9f34b66876f19ee2fb153090277a0b45e42e71f7bdc8a" exitCode=0 Sep 30 13:53:26 crc kubenswrapper[4783]: I0930 13:53:26.915836 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-th6r6-config-ztxzf" event={"ID":"a3f5126d-e5a8-4464-b461-7ccfe5cf329b","Type":"ContainerDied","Data":"d9380ce9afc2c1b59bb9f34b66876f19ee2fb153090277a0b45e42e71f7bdc8a"} Sep 30 13:53:26 crc kubenswrapper[4783]: I0930 13:53:26.923094 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgsmw\" (UniqueName: \"kubernetes.io/projected/d9ba6a72-2550-4331-bdd0-12b192b5bc8a-kube-api-access-xgsmw\") pod \"cinder-db-create-bs8sx\" (UID: \"d9ba6a72-2550-4331-bdd0-12b192b5bc8a\") " pod="openstack/cinder-db-create-bs8sx" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.024459 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgsmw\" (UniqueName: \"kubernetes.io/projected/d9ba6a72-2550-4331-bdd0-12b192b5bc8a-kube-api-access-xgsmw\") pod \"cinder-db-create-bs8sx\" (UID: \"d9ba6a72-2550-4331-bdd0-12b192b5bc8a\") " pod="openstack/cinder-db-create-bs8sx" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.024615 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xqwh\" (UniqueName: \"kubernetes.io/projected/7c228744-0cbc-44ae-9539-7cd32f195543-kube-api-access-9xqwh\") pod \"barbican-db-create-sctg2\" (UID: \"7c228744-0cbc-44ae-9539-7cd32f195543\") " pod="openstack/barbican-db-create-sctg2" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.077103 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgsmw\" (UniqueName: \"kubernetes.io/projected/d9ba6a72-2550-4331-bdd0-12b192b5bc8a-kube-api-access-xgsmw\") pod \"cinder-db-create-bs8sx\" (UID: \"d9ba6a72-2550-4331-bdd0-12b192b5bc8a\") " pod="openstack/cinder-db-create-bs8sx" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.107535 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-fqsdm"] Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.108942 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-fqsdm" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.111960 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-grwfp" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.111966 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.112553 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.115043 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.123918 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bs8sx" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.125448 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xqwh\" (UniqueName: \"kubernetes.io/projected/7c228744-0cbc-44ae-9539-7cd32f195543-kube-api-access-9xqwh\") pod \"barbican-db-create-sctg2\" (UID: \"7c228744-0cbc-44ae-9539-7cd32f195543\") " pod="openstack/barbican-db-create-sctg2" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.127528 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-fqsdm"] Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.147185 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xqwh\" (UniqueName: \"kubernetes.io/projected/7c228744-0cbc-44ae-9539-7cd32f195543-kube-api-access-9xqwh\") pod \"barbican-db-create-sctg2\" (UID: \"7c228744-0cbc-44ae-9539-7cd32f195543\") " pod="openstack/barbican-db-create-sctg2" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.202045 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-sctg2" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.210260 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-z7899"] Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.212257 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-z7899" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.236464 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsl79\" (UniqueName: \"kubernetes.io/projected/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-kube-api-access-lsl79\") pod \"keystone-db-sync-fqsdm\" (UID: \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\") " pod="openstack/keystone-db-sync-fqsdm" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.236515 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-config-data\") pod \"keystone-db-sync-fqsdm\" (UID: \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\") " pod="openstack/keystone-db-sync-fqsdm" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.236553 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-combined-ca-bundle\") pod \"keystone-db-sync-fqsdm\" (UID: \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\") " pod="openstack/keystone-db-sync-fqsdm" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.236618 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z27j5\" (UniqueName: \"kubernetes.io/projected/5e80664f-1f1f-4183-a655-ca8f7e8e4af8-kube-api-access-z27j5\") pod \"neutron-db-create-z7899\" (UID: \"5e80664f-1f1f-4183-a655-ca8f7e8e4af8\") " pod="openstack/neutron-db-create-z7899" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.238791 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-z7899"] Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.337773 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsl79\" (UniqueName: \"kubernetes.io/projected/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-kube-api-access-lsl79\") pod \"keystone-db-sync-fqsdm\" (UID: \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\") " pod="openstack/keystone-db-sync-fqsdm" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.338106 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-config-data\") pod \"keystone-db-sync-fqsdm\" (UID: \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\") " pod="openstack/keystone-db-sync-fqsdm" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.338162 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-combined-ca-bundle\") pod \"keystone-db-sync-fqsdm\" (UID: \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\") " pod="openstack/keystone-db-sync-fqsdm" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.338258 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z27j5\" (UniqueName: \"kubernetes.io/projected/5e80664f-1f1f-4183-a655-ca8f7e8e4af8-kube-api-access-z27j5\") pod \"neutron-db-create-z7899\" (UID: \"5e80664f-1f1f-4183-a655-ca8f7e8e4af8\") " pod="openstack/neutron-db-create-z7899" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.345539 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-config-data\") pod \"keystone-db-sync-fqsdm\" (UID: \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\") " pod="openstack/keystone-db-sync-fqsdm" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.357838 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-combined-ca-bundle\") pod \"keystone-db-sync-fqsdm\" (UID: \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\") " pod="openstack/keystone-db-sync-fqsdm" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.361716 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z27j5\" (UniqueName: \"kubernetes.io/projected/5e80664f-1f1f-4183-a655-ca8f7e8e4af8-kube-api-access-z27j5\") pod \"neutron-db-create-z7899\" (UID: \"5e80664f-1f1f-4183-a655-ca8f7e8e4af8\") " pod="openstack/neutron-db-create-z7899" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.374401 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsl79\" (UniqueName: \"kubernetes.io/projected/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-kube-api-access-lsl79\") pod \"keystone-db-sync-fqsdm\" (UID: \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\") " pod="openstack/keystone-db-sync-fqsdm" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.426539 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-fqsdm" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.552448 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-z7899" Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.645850 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-bs8sx"] Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.954712 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bs8sx" event={"ID":"d9ba6a72-2550-4331-bdd0-12b192b5bc8a","Type":"ContainerStarted","Data":"74ad5198c33bca2dc2c18003a512d1e4a71bde8c329802d06a71c3b072d428d1"} Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.955173 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bs8sx" event={"ID":"d9ba6a72-2550-4331-bdd0-12b192b5bc8a","Type":"ContainerStarted","Data":"91b3c8994b17a1ad977a0c63a03a1d5acd2d4fce5bf5f96e57fa402bd1a15c13"} Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.962873 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"9d2fb4518ac235b269595c179e2eddcd2176f75944af31f8741a4f3a3772afd8"} Sep 30 13:53:27 crc kubenswrapper[4783]: I0930 13:53:27.973287 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-bs8sx" podStartSLOduration=1.973269339 podStartE2EDuration="1.973269339s" podCreationTimestamp="2025-09-30 13:53:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:53:27.971084539 +0000 UTC m=+1107.902550846" watchObservedRunningTime="2025-09-30 13:53:27.973269339 +0000 UTC m=+1107.904735656" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.079856 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-fqsdm"] Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 
13:53:28.150494 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-z7899"] Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.157500 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-sctg2"] Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.473541 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-e8d3-account-create-whrv6" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.501893 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.661999 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-scripts\") pod \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.662058 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-run-ovn\") pod \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.662082 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-log-ovn\") pod \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.662173 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzvk7\" (UniqueName: \"kubernetes.io/projected/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-kube-api-access-zzvk7\") pod \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.662210 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvppl\" (UniqueName: \"kubernetes.io/projected/c2c0f88a-3106-4f9e-b41d-d9fa4542a24f-kube-api-access-mvppl\") pod \"c2c0f88a-3106-4f9e-b41d-d9fa4542a24f\" (UID: \"c2c0f88a-3106-4f9e-b41d-d9fa4542a24f\") " Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.662243 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-run\") pod \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.662302 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-additional-scripts\") pod \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\" (UID: \"a3f5126d-e5a8-4464-b461-7ccfe5cf329b\") " Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.662490 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "a3f5126d-e5a8-4464-b461-7ccfe5cf329b" (UID: "a3f5126d-e5a8-4464-b461-7ccfe5cf329b"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.662877 4783 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.662906 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "a3f5126d-e5a8-4464-b461-7ccfe5cf329b" (UID: "a3f5126d-e5a8-4464-b461-7ccfe5cf329b"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.662971 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-run" (OuterVolumeSpecName: "var-run") pod "a3f5126d-e5a8-4464-b461-7ccfe5cf329b" (UID: "a3f5126d-e5a8-4464-b461-7ccfe5cf329b"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.663164 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "a3f5126d-e5a8-4464-b461-7ccfe5cf329b" (UID: "a3f5126d-e5a8-4464-b461-7ccfe5cf329b"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.664346 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-scripts" (OuterVolumeSpecName: "scripts") pod "a3f5126d-e5a8-4464-b461-7ccfe5cf329b" (UID: "a3f5126d-e5a8-4464-b461-7ccfe5cf329b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.669027 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-kube-api-access-zzvk7" (OuterVolumeSpecName: "kube-api-access-zzvk7") pod "a3f5126d-e5a8-4464-b461-7ccfe5cf329b" (UID: "a3f5126d-e5a8-4464-b461-7ccfe5cf329b"). InnerVolumeSpecName "kube-api-access-zzvk7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.672193 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2c0f88a-3106-4f9e-b41d-d9fa4542a24f-kube-api-access-mvppl" (OuterVolumeSpecName: "kube-api-access-mvppl") pod "c2c0f88a-3106-4f9e-b41d-d9fa4542a24f" (UID: "c2c0f88a-3106-4f9e-b41d-d9fa4542a24f"). InnerVolumeSpecName "kube-api-access-mvppl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.765869 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.765959 4783 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.765988 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzvk7\" (UniqueName: \"kubernetes.io/projected/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-kube-api-access-zzvk7\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.766000 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvppl\" (UniqueName: \"kubernetes.io/projected/c2c0f88a-3106-4f9e-b41d-d9fa4542a24f-kube-api-access-mvppl\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.766008 4783 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.766016 4783 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f5126d-e5a8-4464-b461-7ccfe5cf329b-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.984215 4783 generic.go:334] "Generic (PLEG): container finished" podID="5e80664f-1f1f-4183-a655-ca8f7e8e4af8" containerID="d32de830a691d8a9e36aaad1a0ce5660cd686be4c09ee1c140bec330697b3d16" exitCode=0 Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.984307 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-z7899" event={"ID":"5e80664f-1f1f-4183-a655-ca8f7e8e4af8","Type":"ContainerDied","Data":"d32de830a691d8a9e36aaad1a0ce5660cd686be4c09ee1c140bec330697b3d16"} Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.984810 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-z7899" event={"ID":"5e80664f-1f1f-4183-a655-ca8f7e8e4af8","Type":"ContainerStarted","Data":"1a0ebc6feb2a3b4562e3a6b8c96ee541ef777da7884b7868c2a479b999218b5b"} Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.999375 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"e63439a8f0b25c832bdb5e04264df59bbf40a59ee781f02104bc7c90f0387d0f"} Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.999422 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"65784350e77591c1ca799cd313cc75e676df485f81c5767c0ec61775c2feddef"} Sep 30 13:53:28 crc kubenswrapper[4783]: I0930 13:53:28.999433 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"f059bd6d5a4af4ef530539f9bc6ad12759d52d860d66ae9359e0c13a0faf1590"} Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.010061 4783 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/keystone-db-sync-fqsdm" event={"ID":"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1","Type":"ContainerStarted","Data":"79f06eddfb7e3add08d42a00c9056ca9eeb2b346daeba598bf625d3b63b2351d"} Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.013131 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-sctg2" event={"ID":"7c228744-0cbc-44ae-9539-7cd32f195543","Type":"ContainerDied","Data":"d5c970b20a9e48b0f5f3c33d731065cf4ba92eb88450078d3f4745e63e8925f9"} Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.012925 4783 generic.go:334] "Generic (PLEG): container finished" podID="7c228744-0cbc-44ae-9539-7cd32f195543" containerID="d5c970b20a9e48b0f5f3c33d731065cf4ba92eb88450078d3f4745e63e8925f9" exitCode=0 Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.015019 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-sctg2" event={"ID":"7c228744-0cbc-44ae-9539-7cd32f195543","Type":"ContainerStarted","Data":"2b9930e84e7e2082d625fe23fb255ae2c09ef7475eb1898cb1d861d622643f61"} Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.018866 4783 generic.go:334] "Generic (PLEG): container finished" podID="d9ba6a72-2550-4331-bdd0-12b192b5bc8a" containerID="74ad5198c33bca2dc2c18003a512d1e4a71bde8c329802d06a71c3b072d428d1" exitCode=0 Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.019119 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bs8sx" event={"ID":"d9ba6a72-2550-4331-bdd0-12b192b5bc8a","Type":"ContainerDied","Data":"74ad5198c33bca2dc2c18003a512d1e4a71bde8c329802d06a71c3b072d428d1"} Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.021373 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-e8d3-account-create-whrv6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.021358 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-e8d3-account-create-whrv6" event={"ID":"c2c0f88a-3106-4f9e-b41d-d9fa4542a24f","Type":"ContainerDied","Data":"6b821f2785d67974f24213d0f701e1a74faa3bf8f025cfeb38394fff6f4284af"} Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.021569 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b821f2785d67974f24213d0f701e1a74faa3bf8f025cfeb38394fff6f4284af" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.040148 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-th6r6-config-ztxzf" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.040694 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-th6r6-config-ztxzf" event={"ID":"a3f5126d-e5a8-4464-b461-7ccfe5cf329b","Type":"ContainerDied","Data":"37f4053b1152da95612f396c9acd99d937ca77ae96e70d12e0771e536f70393e"} Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.040736 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37f4053b1152da95612f396c9acd99d937ca77ae96e70d12e0771e536f70393e" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.430765 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-th6r6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.478446 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-mvmd6"] Sep 30 13:53:29 crc kubenswrapper[4783]: E0930 13:53:29.479464 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2c0f88a-3106-4f9e-b41d-d9fa4542a24f" containerName="mariadb-account-create" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.479491 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2c0f88a-3106-4f9e-b41d-d9fa4542a24f" containerName="mariadb-account-create" Sep 30 13:53:29 crc kubenswrapper[4783]: E0930 13:53:29.479517 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3f5126d-e5a8-4464-b461-7ccfe5cf329b" containerName="ovn-config" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.479526 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3f5126d-e5a8-4464-b461-7ccfe5cf329b" containerName="ovn-config" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.479842 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2c0f88a-3106-4f9e-b41d-d9fa4542a24f" containerName="mariadb-account-create" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.479897 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3f5126d-e5a8-4464-b461-7ccfe5cf329b" containerName="ovn-config" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.480786 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.485893 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-r4nf8" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.486253 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.488179 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-mvmd6"] Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.488780 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zdxq\" (UniqueName: \"kubernetes.io/projected/13707881-f4b3-4fea-b926-3724eb156688-kube-api-access-6zdxq\") pod \"glance-db-sync-mvmd6\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.488986 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-db-sync-config-data\") pod \"glance-db-sync-mvmd6\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.489025 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-config-data\") pod \"glance-db-sync-mvmd6\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.489050 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-combined-ca-bundle\") pod \"glance-db-sync-mvmd6\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.590199 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-db-sync-config-data\") pod \"glance-db-sync-mvmd6\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.590568 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-config-data\") pod \"glance-db-sync-mvmd6\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.590587 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-combined-ca-bundle\") pod \"glance-db-sync-mvmd6\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.590616 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zdxq\" (UniqueName: \"kubernetes.io/projected/13707881-f4b3-4fea-b926-3724eb156688-kube-api-access-6zdxq\") pod 
\"glance-db-sync-mvmd6\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.596288 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-db-sync-config-data\") pod \"glance-db-sync-mvmd6\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.597793 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-config-data\") pod \"glance-db-sync-mvmd6\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.598872 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-combined-ca-bundle\") pod \"glance-db-sync-mvmd6\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.612439 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-th6r6-config-ztxzf"] Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.614121 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zdxq\" (UniqueName: \"kubernetes.io/projected/13707881-f4b3-4fea-b926-3724eb156688-kube-api-access-6zdxq\") pod \"glance-db-sync-mvmd6\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.661544 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-th6r6-config-ztxzf"] Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.669985 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-th6r6-config-42fzr"] Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.671395 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.674345 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.679961 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-th6r6-config-42fzr"] Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.797177 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.798157 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/269f4a90-9c85-4e28-ba80-db63ac29187a-additional-scripts\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.798192 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-run-ovn\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.798269 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-run\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.798339 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-log-ovn\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.798453 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/269f4a90-9c85-4e28-ba80-db63ac29187a-scripts\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.798525 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nrxl\" (UniqueName: \"kubernetes.io/projected/269f4a90-9c85-4e28-ba80-db63ac29187a-kube-api-access-2nrxl\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.901646 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-log-ovn\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.902101 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-log-ovn\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.902201 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/269f4a90-9c85-4e28-ba80-db63ac29187a-scripts\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.902292 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nrxl\" (UniqueName: \"kubernetes.io/projected/269f4a90-9c85-4e28-ba80-db63ac29187a-kube-api-access-2nrxl\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.902770 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/269f4a90-9c85-4e28-ba80-db63ac29187a-additional-scripts\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.903182 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-run-ovn\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.903653 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-run-ovn\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.903887 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/269f4a90-9c85-4e28-ba80-db63ac29187a-additional-scripts\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.904285 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-run\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.904425 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-run\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.904599 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/269f4a90-9c85-4e28-ba80-db63ac29187a-scripts\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:29 crc kubenswrapper[4783]: I0930 13:53:29.925895 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nrxl\" (UniqueName: 
\"kubernetes.io/projected/269f4a90-9c85-4e28-ba80-db63ac29187a-kube-api-access-2nrxl\") pod \"ovn-controller-th6r6-config-42fzr\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.010774 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.055169 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"6891d76e2dc7453b6d905b53c783801dbea028a59018a446002e2af529d42a44"} Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.055219 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"0d70c68a7c4fc63e37a1cd88f352dcc6ea4b65b3ee61fb6b9e535bf1688edd50"} Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.055249 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerStarted","Data":"a5d2dcabd6bb3cd5f6248c47300f6f1b1f5ab3e6ca65c46a1385315a9950b46a"} Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.125596 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=20.746952109 podStartE2EDuration="30.12557915s" podCreationTimestamp="2025-09-30 13:53:00 +0000 UTC" firstStartedPulling="2025-09-30 13:53:18.199336794 +0000 UTC m=+1098.130803101" lastFinishedPulling="2025-09-30 13:53:27.577963835 +0000 UTC m=+1107.509430142" observedRunningTime="2025-09-30 13:53:30.119284248 +0000 UTC m=+1110.050750565" watchObservedRunningTime="2025-09-30 13:53:30.12557915 +0000 UTC m=+1110.057045457" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.356658 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-mvmd6"] Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.384363 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6cfbb96789-5wdf6"] Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.385789 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.389502 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.399565 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cfbb96789-5wdf6"] Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.447721 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-config\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.447756 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-dns-swift-storage-0\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.447784 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-ovsdbserver-sb\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.447806 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2r6k\" (UniqueName: \"kubernetes.io/projected/45bc7e76-3536-419f-8f07-6b4c4554295e-kube-api-access-m2r6k\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.447841 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-dns-svc\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.447873 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-ovsdbserver-nb\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.549457 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-config\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.549523 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-dns-swift-storage-0\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: 
\"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.549559 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-ovsdbserver-sb\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.549632 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2r6k\" (UniqueName: \"kubernetes.io/projected/45bc7e76-3536-419f-8f07-6b4c4554295e-kube-api-access-m2r6k\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.549676 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-dns-svc\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.549744 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-ovsdbserver-nb\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.550562 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-config\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.550903 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-ovsdbserver-nb\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.551010 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-dns-svc\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.551025 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-dns-swift-storage-0\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.551680 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-ovsdbserver-sb\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc 
kubenswrapper[4783]: I0930 13:53:30.570240 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2r6k\" (UniqueName: \"kubernetes.io/projected/45bc7e76-3536-419f-8f07-6b4c4554295e-kube-api-access-m2r6k\") pod \"dnsmasq-dns-6cfbb96789-5wdf6\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.587950 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.595165 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-z7899" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.599308 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-sctg2" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.609845 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bs8sx" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.650047 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z27j5\" (UniqueName: \"kubernetes.io/projected/5e80664f-1f1f-4183-a655-ca8f7e8e4af8-kube-api-access-z27j5\") pod \"5e80664f-1f1f-4183-a655-ca8f7e8e4af8\" (UID: \"5e80664f-1f1f-4183-a655-ca8f7e8e4af8\") " Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.650143 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xqwh\" (UniqueName: \"kubernetes.io/projected/7c228744-0cbc-44ae-9539-7cd32f195543-kube-api-access-9xqwh\") pod \"7c228744-0cbc-44ae-9539-7cd32f195543\" (UID: \"7c228744-0cbc-44ae-9539-7cd32f195543\") " Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.650234 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgsmw\" (UniqueName: \"kubernetes.io/projected/d9ba6a72-2550-4331-bdd0-12b192b5bc8a-kube-api-access-xgsmw\") pod \"d9ba6a72-2550-4331-bdd0-12b192b5bc8a\" (UID: \"d9ba6a72-2550-4331-bdd0-12b192b5bc8a\") " Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.653880 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e80664f-1f1f-4183-a655-ca8f7e8e4af8-kube-api-access-z27j5" (OuterVolumeSpecName: "kube-api-access-z27j5") pod "5e80664f-1f1f-4183-a655-ca8f7e8e4af8" (UID: "5e80664f-1f1f-4183-a655-ca8f7e8e4af8"). InnerVolumeSpecName "kube-api-access-z27j5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.654434 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c228744-0cbc-44ae-9539-7cd32f195543-kube-api-access-9xqwh" (OuterVolumeSpecName: "kube-api-access-9xqwh") pod "7c228744-0cbc-44ae-9539-7cd32f195543" (UID: "7c228744-0cbc-44ae-9539-7cd32f195543"). InnerVolumeSpecName "kube-api-access-9xqwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.660153 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9ba6a72-2550-4331-bdd0-12b192b5bc8a-kube-api-access-xgsmw" (OuterVolumeSpecName: "kube-api-access-xgsmw") pod "d9ba6a72-2550-4331-bdd0-12b192b5bc8a" (UID: "d9ba6a72-2550-4331-bdd0-12b192b5bc8a"). InnerVolumeSpecName "kube-api-access-xgsmw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:30 crc kubenswrapper[4783]: I0930 13:53:30.698673 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-th6r6-config-42fzr"] Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:30.752749 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xqwh\" (UniqueName: \"kubernetes.io/projected/7c228744-0cbc-44ae-9539-7cd32f195543-kube-api-access-9xqwh\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:30.752773 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgsmw\" (UniqueName: \"kubernetes.io/projected/d9ba6a72-2550-4331-bdd0-12b192b5bc8a-kube-api-access-xgsmw\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:30.752783 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z27j5\" (UniqueName: \"kubernetes.io/projected/5e80664f-1f1f-4183-a655-ca8f7e8e4af8-kube-api-access-z27j5\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:30.870417 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3f5126d-e5a8-4464-b461-7ccfe5cf329b" path="/var/lib/kubelet/pods/a3f5126d-e5a8-4464-b461-7ccfe5cf329b/volumes" Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:31.066919 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-th6r6-config-42fzr" event={"ID":"269f4a90-9c85-4e28-ba80-db63ac29187a","Type":"ContainerStarted","Data":"1b212802d2ca2c843cc4cf102c1b95cee35c87b29e27691f362ff32bcad97ce9"} Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:31.068534 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-mvmd6" event={"ID":"13707881-f4b3-4fea-b926-3724eb156688","Type":"ContainerStarted","Data":"77a7098a45a8170a92dd803ff4a21392788224b2bf58b77e1d481a1d33b5be3b"} Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:31.070777 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-sctg2" event={"ID":"7c228744-0cbc-44ae-9539-7cd32f195543","Type":"ContainerDied","Data":"2b9930e84e7e2082d625fe23fb255ae2c09ef7475eb1898cb1d861d622643f61"} Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:31.070791 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-sctg2" Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:31.070818 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b9930e84e7e2082d625fe23fb255ae2c09ef7475eb1898cb1d861d622643f61" Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:31.072783 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bs8sx" event={"ID":"d9ba6a72-2550-4331-bdd0-12b192b5bc8a","Type":"ContainerDied","Data":"91b3c8994b17a1ad977a0c63a03a1d5acd2d4fce5bf5f96e57fa402bd1a15c13"} Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:31.072817 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="91b3c8994b17a1ad977a0c63a03a1d5acd2d4fce5bf5f96e57fa402bd1a15c13" Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:31.072872 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bs8sx" Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:31.075843 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-z7899" Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:31.075887 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-z7899" event={"ID":"5e80664f-1f1f-4183-a655-ca8f7e8e4af8","Type":"ContainerDied","Data":"1a0ebc6feb2a3b4562e3a6b8c96ee541ef777da7884b7868c2a479b999218b5b"} Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:31.075927 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a0ebc6feb2a3b4562e3a6b8c96ee541ef777da7884b7868c2a479b999218b5b" Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:32.090027 4783 generic.go:334] "Generic (PLEG): container finished" podID="269f4a90-9c85-4e28-ba80-db63ac29187a" containerID="45d206da77fcb67e119f037fcfa5d0b583642d09013a408c9ef81aa0123902e0" exitCode=0 Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:32.090160 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-th6r6-config-42fzr" event={"ID":"269f4a90-9c85-4e28-ba80-db63ac29187a","Type":"ContainerDied","Data":"45d206da77fcb67e119f037fcfa5d0b583642d09013a408c9ef81aa0123902e0"} Sep 30 13:53:32 crc kubenswrapper[4783]: I0930 13:53:32.672083 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cfbb96789-5wdf6"] Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.821282 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.951666 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-log-ovn\") pod \"269f4a90-9c85-4e28-ba80-db63ac29187a\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.951765 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-run-ovn\") pod \"269f4a90-9c85-4e28-ba80-db63ac29187a\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.951782 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "269f4a90-9c85-4e28-ba80-db63ac29187a" (UID: "269f4a90-9c85-4e28-ba80-db63ac29187a"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.951871 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nrxl\" (UniqueName: \"kubernetes.io/projected/269f4a90-9c85-4e28-ba80-db63ac29187a-kube-api-access-2nrxl\") pod \"269f4a90-9c85-4e28-ba80-db63ac29187a\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.951879 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "269f4a90-9c85-4e28-ba80-db63ac29187a" (UID: "269f4a90-9c85-4e28-ba80-db63ac29187a"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.952078 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/269f4a90-9c85-4e28-ba80-db63ac29187a-scripts\") pod \"269f4a90-9c85-4e28-ba80-db63ac29187a\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.952106 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/269f4a90-9c85-4e28-ba80-db63ac29187a-additional-scripts\") pod \"269f4a90-9c85-4e28-ba80-db63ac29187a\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.952183 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-run\") pod \"269f4a90-9c85-4e28-ba80-db63ac29187a\" (UID: \"269f4a90-9c85-4e28-ba80-db63ac29187a\") " Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.952383 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-run" (OuterVolumeSpecName: "var-run") pod "269f4a90-9c85-4e28-ba80-db63ac29187a" (UID: "269f4a90-9c85-4e28-ba80-db63ac29187a"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.952940 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/269f4a90-9c85-4e28-ba80-db63ac29187a-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "269f4a90-9c85-4e28-ba80-db63ac29187a" (UID: "269f4a90-9c85-4e28-ba80-db63ac29187a"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.952980 4783 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.952996 4783 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.953006 4783 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/269f4a90-9c85-4e28-ba80-db63ac29187a-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.953066 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/269f4a90-9c85-4e28-ba80-db63ac29187a-scripts" (OuterVolumeSpecName: "scripts") pod "269f4a90-9c85-4e28-ba80-db63ac29187a" (UID: "269f4a90-9c85-4e28-ba80-db63ac29187a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:35 crc kubenswrapper[4783]: I0930 13:53:35.954985 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/269f4a90-9c85-4e28-ba80-db63ac29187a-kube-api-access-2nrxl" (OuterVolumeSpecName: "kube-api-access-2nrxl") pod "269f4a90-9c85-4e28-ba80-db63ac29187a" (UID: "269f4a90-9c85-4e28-ba80-db63ac29187a"). 
InnerVolumeSpecName "kube-api-access-2nrxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.054814 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nrxl\" (UniqueName: \"kubernetes.io/projected/269f4a90-9c85-4e28-ba80-db63ac29187a-kube-api-access-2nrxl\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.055299 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/269f4a90-9c85-4e28-ba80-db63ac29187a-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.055312 4783 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/269f4a90-9c85-4e28-ba80-db63ac29187a-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.125675 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-th6r6-config-42fzr" event={"ID":"269f4a90-9c85-4e28-ba80-db63ac29187a","Type":"ContainerDied","Data":"1b212802d2ca2c843cc4cf102c1b95cee35c87b29e27691f362ff32bcad97ce9"} Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.125737 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b212802d2ca2c843cc4cf102c1b95cee35c87b29e27691f362ff32bcad97ce9" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.125757 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-th6r6-config-42fzr" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.128153 4783 generic.go:334] "Generic (PLEG): container finished" podID="45bc7e76-3536-419f-8f07-6b4c4554295e" containerID="ec99d461f917862f91ddba219ecc1db525e7f7eb7b6cae8d055578ca5a1c0c4b" exitCode=0 Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.128203 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" event={"ID":"45bc7e76-3536-419f-8f07-6b4c4554295e","Type":"ContainerDied","Data":"ec99d461f917862f91ddba219ecc1db525e7f7eb7b6cae8d055578ca5a1c0c4b"} Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.128252 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" event={"ID":"45bc7e76-3536-419f-8f07-6b4c4554295e","Type":"ContainerStarted","Data":"e2e835537f9ef248d587b4270d8d88c3e43f6830842fcf2c549035a96880108f"} Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.131252 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fqsdm" event={"ID":"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1","Type":"ContainerStarted","Data":"dbcbbb2c21a7bbf5f6862e92534c203025b94a1782ab43292ea1052cc9b2702e"} Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.178068 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-fqsdm" podStartSLOduration=1.325134842 podStartE2EDuration="9.178047496s" podCreationTimestamp="2025-09-30 13:53:27 +0000 UTC" firstStartedPulling="2025-09-30 13:53:28.078460189 +0000 UTC m=+1108.009926496" lastFinishedPulling="2025-09-30 13:53:35.931372843 +0000 UTC m=+1115.862839150" observedRunningTime="2025-09-30 13:53:36.173147208 +0000 UTC m=+1116.104613585" watchObservedRunningTime="2025-09-30 13:53:36.178047496 +0000 UTC m=+1116.109513793" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.808007 4783 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/cinder-230c-account-create-9rv9g"] Sep 30 13:53:36 crc kubenswrapper[4783]: E0930 13:53:36.808667 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c228744-0cbc-44ae-9539-7cd32f195543" containerName="mariadb-database-create" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.808700 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c228744-0cbc-44ae-9539-7cd32f195543" containerName="mariadb-database-create" Sep 30 13:53:36 crc kubenswrapper[4783]: E0930 13:53:36.808725 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e80664f-1f1f-4183-a655-ca8f7e8e4af8" containerName="mariadb-database-create" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.808737 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e80664f-1f1f-4183-a655-ca8f7e8e4af8" containerName="mariadb-database-create" Sep 30 13:53:36 crc kubenswrapper[4783]: E0930 13:53:36.808783 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9ba6a72-2550-4331-bdd0-12b192b5bc8a" containerName="mariadb-database-create" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.808797 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9ba6a72-2550-4331-bdd0-12b192b5bc8a" containerName="mariadb-database-create" Sep 30 13:53:36 crc kubenswrapper[4783]: E0930 13:53:36.808819 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="269f4a90-9c85-4e28-ba80-db63ac29187a" containerName="ovn-config" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.808830 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="269f4a90-9c85-4e28-ba80-db63ac29187a" containerName="ovn-config" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.809190 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="269f4a90-9c85-4e28-ba80-db63ac29187a" containerName="ovn-config" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.809253 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9ba6a72-2550-4331-bdd0-12b192b5bc8a" containerName="mariadb-database-create" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.809282 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c228744-0cbc-44ae-9539-7cd32f195543" containerName="mariadb-database-create" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.809648 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e80664f-1f1f-4183-a655-ca8f7e8e4af8" containerName="mariadb-database-create" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.810621 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-230c-account-create-9rv9g" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.813041 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.817921 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-230c-account-create-9rv9g"] Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.891326 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-th6r6-config-42fzr"] Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.895515 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-th6r6-config-42fzr"] Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.928684 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-th6r6-config-9jmbs"] Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.930812 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-th6r6-config-9jmbs" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.935471 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.950971 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-th6r6-config-9jmbs"] Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.975478 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txcrk\" (UniqueName: \"kubernetes.io/projected/2070f204-8f44-4bcb-8082-440aed622fbf-kube-api-access-txcrk\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.975532 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tntcr\" (UniqueName: \"kubernetes.io/projected/01ac8973-fc47-4f89-86d2-b973ef33a21d-kube-api-access-tntcr\") pod \"cinder-230c-account-create-9rv9g\" (UID: \"01ac8973-fc47-4f89-86d2-b973ef33a21d\") " pod="openstack/cinder-230c-account-create-9rv9g" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.975557 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-run-ovn\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.975598 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-log-ovn\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs" Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.975629 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2070f204-8f44-4bcb-8082-440aed622fbf-additional-scripts\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs" 
Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.975665 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2070f204-8f44-4bcb-8082-440aed622fbf-scripts\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:36 crc kubenswrapper[4783]: I0930 13:53:36.975701 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-run\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.041920 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8272-account-create-2x85s"]
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.065805 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8272-account-create-2x85s"]
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.065915 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8272-account-create-2x85s"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.069106 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.083404 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-run-ovn\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.083467 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-log-ovn\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.083500 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2070f204-8f44-4bcb-8082-440aed622fbf-additional-scripts\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.083532 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2070f204-8f44-4bcb-8082-440aed622fbf-scripts\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.083562 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-run\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.083608 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txcrk\" (UniqueName: \"kubernetes.io/projected/2070f204-8f44-4bcb-8082-440aed622fbf-kube-api-access-txcrk\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.083633 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tntcr\" (UniqueName: \"kubernetes.io/projected/01ac8973-fc47-4f89-86d2-b973ef33a21d-kube-api-access-tntcr\") pod \"cinder-230c-account-create-9rv9g\" (UID: \"01ac8973-fc47-4f89-86d2-b973ef33a21d\") " pod="openstack/cinder-230c-account-create-9rv9g"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.098305 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-run-ovn\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.098408 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-log-ovn\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.098993 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2070f204-8f44-4bcb-8082-440aed622fbf-additional-scripts\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.100462 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2070f204-8f44-4bcb-8082-440aed622fbf-scripts\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.100513 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-run\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.133683 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tntcr\" (UniqueName: \"kubernetes.io/projected/01ac8973-fc47-4f89-86d2-b973ef33a21d-kube-api-access-tntcr\") pod \"cinder-230c-account-create-9rv9g\" (UID: \"01ac8973-fc47-4f89-86d2-b973ef33a21d\") " pod="openstack/cinder-230c-account-create-9rv9g"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.136693 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txcrk\" (UniqueName: \"kubernetes.io/projected/2070f204-8f44-4bcb-8082-440aed622fbf-kube-api-access-txcrk\") pod \"ovn-controller-th6r6-config-9jmbs\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.147141 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-230c-account-create-9rv9g"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.187013 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sn5sx\" (UniqueName: \"kubernetes.io/projected/25530b76-7a63-45b8-b096-492a37a7237d-kube-api-access-sn5sx\") pod \"barbican-8272-account-create-2x85s\" (UID: \"25530b76-7a63-45b8-b096-492a37a7237d\") " pod="openstack/barbican-8272-account-create-2x85s"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.220821 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-ac9c-account-create-lkwhv"]
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.222121 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ac9c-account-create-lkwhv"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.224908 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.231922 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ac9c-account-create-lkwhv"]
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.257880 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-th6r6-config-9jmbs"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.288390 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sn5sx\" (UniqueName: \"kubernetes.io/projected/25530b76-7a63-45b8-b096-492a37a7237d-kube-api-access-sn5sx\") pod \"barbican-8272-account-create-2x85s\" (UID: \"25530b76-7a63-45b8-b096-492a37a7237d\") " pod="openstack/barbican-8272-account-create-2x85s"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.288483 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxksd\" (UniqueName: \"kubernetes.io/projected/011eca05-b58b-4412-b0d8-3700bb26099b-kube-api-access-rxksd\") pod \"neutron-ac9c-account-create-lkwhv\" (UID: \"011eca05-b58b-4412-b0d8-3700bb26099b\") " pod="openstack/neutron-ac9c-account-create-lkwhv"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.304569 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sn5sx\" (UniqueName: \"kubernetes.io/projected/25530b76-7a63-45b8-b096-492a37a7237d-kube-api-access-sn5sx\") pod \"barbican-8272-account-create-2x85s\" (UID: \"25530b76-7a63-45b8-b096-492a37a7237d\") " pod="openstack/barbican-8272-account-create-2x85s"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.389404 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxksd\" (UniqueName: \"kubernetes.io/projected/011eca05-b58b-4412-b0d8-3700bb26099b-kube-api-access-rxksd\") pod \"neutron-ac9c-account-create-lkwhv\" (UID: \"011eca05-b58b-4412-b0d8-3700bb26099b\") " pod="openstack/neutron-ac9c-account-create-lkwhv"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.405927 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxksd\" (UniqueName: \"kubernetes.io/projected/011eca05-b58b-4412-b0d8-3700bb26099b-kube-api-access-rxksd\") pod \"neutron-ac9c-account-create-lkwhv\" (UID: \"011eca05-b58b-4412-b0d8-3700bb26099b\") " pod="openstack/neutron-ac9c-account-create-lkwhv"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.485188 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8272-account-create-2x85s"
Sep 30 13:53:37 crc kubenswrapper[4783]: I0930 13:53:37.536064 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ac9c-account-create-lkwhv"
Sep 30 13:53:38 crc kubenswrapper[4783]: I0930 13:53:38.880262 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="269f4a90-9c85-4e28-ba80-db63ac29187a" path="/var/lib/kubelet/pods/269f4a90-9c85-4e28-ba80-db63ac29187a/volumes"
Sep 30 13:53:44 crc kubenswrapper[4783]: I0930 13:53:44.689693 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ac9c-account-create-lkwhv"]
Sep 30 13:53:44 crc kubenswrapper[4783]: W0930 13:53:44.692928 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod011eca05_b58b_4412_b0d8_3700bb26099b.slice/crio-aa90170fdbc77fde8039860efb00f7860db2bb38bb0445dfb50bc956cb343752 WatchSource:0}: Error finding container aa90170fdbc77fde8039860efb00f7860db2bb38bb0445dfb50bc956cb343752: Status 404 returned error can't find the container with id aa90170fdbc77fde8039860efb00f7860db2bb38bb0445dfb50bc956cb343752
Sep 30 13:53:44 crc kubenswrapper[4783]: I0930 13:53:44.806978 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8272-account-create-2x85s"]
Sep 30 13:53:44 crc kubenswrapper[4783]: W0930 13:53:44.812325 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25530b76_7a63_45b8_b096_492a37a7237d.slice/crio-fa74798cddf6912524e903e134e399dfe4d21cba98bde8b1f032c1757d086ba2 WatchSource:0}: Error finding container fa74798cddf6912524e903e134e399dfe4d21cba98bde8b1f032c1757d086ba2: Status 404 returned error can't find the container with id fa74798cddf6912524e903e134e399dfe4d21cba98bde8b1f032c1757d086ba2
Sep 30 13:53:44 crc kubenswrapper[4783]: I0930 13:53:44.820003 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-th6r6-config-9jmbs"]
Sep 30 13:53:44 crc kubenswrapper[4783]: I0930 13:53:44.861650 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-230c-account-create-9rv9g"]
Sep 30 13:53:44 crc kubenswrapper[4783]: W0930 13:53:44.868082 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod01ac8973_fc47_4f89_86d2_b973ef33a21d.slice/crio-dadc2d3881334dc6f3187e49a68b22a5ea666597246bd8082ff28afa6d31f8f1 WatchSource:0}: Error finding container dadc2d3881334dc6f3187e49a68b22a5ea666597246bd8082ff28afa6d31f8f1: Status 404 returned error can't find the container with id dadc2d3881334dc6f3187e49a68b22a5ea666597246bd8082ff28afa6d31f8f1
Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.217691 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" event={"ID":"45bc7e76-3536-419f-8f07-6b4c4554295e","Type":"ContainerStarted","Data":"5b8aebbea0ba5437f3d1d08ac1e4152d4d2911f0fa34ed702a4365b795281ad0"}
Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.219331 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6"
Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.224760 4783 generic.go:334] "Generic (PLEG): container finished" podID="01ac8973-fc47-4f89-86d2-b973ef33a21d" containerID="bc89642497736475077a9ea1aa1c941d6da45da0f7b5e515ee7b27b1d5853429" exitCode=0
Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.224827 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-230c-account-create-9rv9g" event={"ID":"01ac8973-fc47-4f89-86d2-b973ef33a21d","Type":"ContainerDied","Data":"bc89642497736475077a9ea1aa1c941d6da45da0f7b5e515ee7b27b1d5853429"}
Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.224866 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-230c-account-create-9rv9g" event={"ID":"01ac8973-fc47-4f89-86d2-b973ef33a21d","Type":"ContainerStarted","Data":"dadc2d3881334dc6f3187e49a68b22a5ea666597246bd8082ff28afa6d31f8f1"}
Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.226558 4783 generic.go:334] "Generic (PLEG): container finished" podID="25530b76-7a63-45b8-b096-492a37a7237d" containerID="c50c006678d45fd361c7404fabc08415ff8162e67e9c073e3bba83e1036f18e6" exitCode=0
Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.226618 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8272-account-create-2x85s" event={"ID":"25530b76-7a63-45b8-b096-492a37a7237d","Type":"ContainerDied","Data":"c50c006678d45fd361c7404fabc08415ff8162e67e9c073e3bba83e1036f18e6"}
Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.226645 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8272-account-create-2x85s" event={"ID":"25530b76-7a63-45b8-b096-492a37a7237d","Type":"ContainerStarted","Data":"fa74798cddf6912524e903e134e399dfe4d21cba98bde8b1f032c1757d086ba2"}
Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.227879 4783 generic.go:334] "Generic (PLEG): container finished" podID="011eca05-b58b-4412-b0d8-3700bb26099b" containerID="3ef861bef2f56add4869192e5592580e98d2db290f25ea904d3a2752fa43affc" exitCode=0
Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.227928 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ac9c-account-create-lkwhv" event={"ID":"011eca05-b58b-4412-b0d8-3700bb26099b","Type":"ContainerDied","Data":"3ef861bef2f56add4869192e5592580e98d2db290f25ea904d3a2752fa43affc"}
Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.227943 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ac9c-account-create-lkwhv" event={"ID":"011eca05-b58b-4412-b0d8-3700bb26099b","Type":"ContainerStarted","Data":"aa90170fdbc77fde8039860efb00f7860db2bb38bb0445dfb50bc956cb343752"}
Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.229296 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-th6r6-config-9jmbs" event={"ID":"2070f204-8f44-4bcb-8082-440aed622fbf","Type":"ContainerStarted","Data":"3330d8fe05c885ba215ebd72777bfc52677ce62dfab3328787084eb49afdb9d8"}
Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.231081 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-mvmd6" event={"ID":"13707881-f4b3-4fea-b926-3724eb156688","Type":"ContainerStarted","Data":"de7d813759552084e588f18ac7d2fa4049833b8b17f8a47d3bf8b8918e2749e6"}
Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.241891 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" podStartSLOduration=15.24181395 podStartE2EDuration="15.24181395s" podCreationTimestamp="2025-09-30 13:53:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC"
observedRunningTime="2025-09-30 13:53:45.233630168 +0000 UTC m=+1125.165096475" watchObservedRunningTime="2025-09-30 13:53:45.24181395 +0000 UTC m=+1125.173280277" Sep 30 13:53:45 crc kubenswrapper[4783]: I0930 13:53:45.267483 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-mvmd6" podStartSLOduration=2.332597498 podStartE2EDuration="16.267466412s" podCreationTimestamp="2025-09-30 13:53:29 +0000 UTC" firstStartedPulling="2025-09-30 13:53:30.393444981 +0000 UTC m=+1110.324911288" lastFinishedPulling="2025-09-30 13:53:44.328313895 +0000 UTC m=+1124.259780202" observedRunningTime="2025-09-30 13:53:45.261328925 +0000 UTC m=+1125.192795232" watchObservedRunningTime="2025-09-30 13:53:45.267466412 +0000 UTC m=+1125.198932719" Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.247109 4783 generic.go:334] "Generic (PLEG): container finished" podID="2070f204-8f44-4bcb-8082-440aed622fbf" containerID="31da0d4fbb6febd8bf5042f2feb8393e2237e495458a723709caf6e0acb16671" exitCode=0 Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.247203 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-th6r6-config-9jmbs" event={"ID":"2070f204-8f44-4bcb-8082-440aed622fbf","Type":"ContainerDied","Data":"31da0d4fbb6febd8bf5042f2feb8393e2237e495458a723709caf6e0acb16671"} Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.251252 4783 generic.go:334] "Generic (PLEG): container finished" podID="2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1" containerID="dbcbbb2c21a7bbf5f6862e92534c203025b94a1782ab43292ea1052cc9b2702e" exitCode=0 Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.251408 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fqsdm" event={"ID":"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1","Type":"ContainerDied","Data":"dbcbbb2c21a7bbf5f6862e92534c203025b94a1782ab43292ea1052cc9b2702e"} Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.697289 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-230c-account-create-9rv9g" Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.705563 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8272-account-create-2x85s" Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.706764 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-ac9c-account-create-lkwhv" Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.854794 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxksd\" (UniqueName: \"kubernetes.io/projected/011eca05-b58b-4412-b0d8-3700bb26099b-kube-api-access-rxksd\") pod \"011eca05-b58b-4412-b0d8-3700bb26099b\" (UID: \"011eca05-b58b-4412-b0d8-3700bb26099b\") " Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.854980 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sn5sx\" (UniqueName: \"kubernetes.io/projected/25530b76-7a63-45b8-b096-492a37a7237d-kube-api-access-sn5sx\") pod \"25530b76-7a63-45b8-b096-492a37a7237d\" (UID: \"25530b76-7a63-45b8-b096-492a37a7237d\") " Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.855633 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tntcr\" (UniqueName: \"kubernetes.io/projected/01ac8973-fc47-4f89-86d2-b973ef33a21d-kube-api-access-tntcr\") pod \"01ac8973-fc47-4f89-86d2-b973ef33a21d\" (UID: \"01ac8973-fc47-4f89-86d2-b973ef33a21d\") " Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.863202 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/011eca05-b58b-4412-b0d8-3700bb26099b-kube-api-access-rxksd" (OuterVolumeSpecName: "kube-api-access-rxksd") pod "011eca05-b58b-4412-b0d8-3700bb26099b" (UID: "011eca05-b58b-4412-b0d8-3700bb26099b"). InnerVolumeSpecName "kube-api-access-rxksd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.863990 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ac8973-fc47-4f89-86d2-b973ef33a21d-kube-api-access-tntcr" (OuterVolumeSpecName: "kube-api-access-tntcr") pod "01ac8973-fc47-4f89-86d2-b973ef33a21d" (UID: "01ac8973-fc47-4f89-86d2-b973ef33a21d"). InnerVolumeSpecName "kube-api-access-tntcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.864593 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25530b76-7a63-45b8-b096-492a37a7237d-kube-api-access-sn5sx" (OuterVolumeSpecName: "kube-api-access-sn5sx") pod "25530b76-7a63-45b8-b096-492a37a7237d" (UID: "25530b76-7a63-45b8-b096-492a37a7237d"). InnerVolumeSpecName "kube-api-access-sn5sx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.958655 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sn5sx\" (UniqueName: \"kubernetes.io/projected/25530b76-7a63-45b8-b096-492a37a7237d-kube-api-access-sn5sx\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.958735 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tntcr\" (UniqueName: \"kubernetes.io/projected/01ac8973-fc47-4f89-86d2-b973ef33a21d-kube-api-access-tntcr\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:46 crc kubenswrapper[4783]: I0930 13:53:46.958753 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxksd\" (UniqueName: \"kubernetes.io/projected/011eca05-b58b-4412-b0d8-3700bb26099b-kube-api-access-rxksd\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.267892 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-230c-account-create-9rv9g" event={"ID":"01ac8973-fc47-4f89-86d2-b973ef33a21d","Type":"ContainerDied","Data":"dadc2d3881334dc6f3187e49a68b22a5ea666597246bd8082ff28afa6d31f8f1"} Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.267940 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dadc2d3881334dc6f3187e49a68b22a5ea666597246bd8082ff28afa6d31f8f1" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.267945 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-230c-account-create-9rv9g" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.270039 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8272-account-create-2x85s" event={"ID":"25530b76-7a63-45b8-b096-492a37a7237d","Type":"ContainerDied","Data":"fa74798cddf6912524e903e134e399dfe4d21cba98bde8b1f032c1757d086ba2"} Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.270072 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa74798cddf6912524e903e134e399dfe4d21cba98bde8b1f032c1757d086ba2" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.270128 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8272-account-create-2x85s" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.273017 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ac9c-account-create-lkwhv" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.274920 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ac9c-account-create-lkwhv" event={"ID":"011eca05-b58b-4412-b0d8-3700bb26099b","Type":"ContainerDied","Data":"aa90170fdbc77fde8039860efb00f7860db2bb38bb0445dfb50bc956cb343752"} Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.275111 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa90170fdbc77fde8039860efb00f7860db2bb38bb0445dfb50bc956cb343752" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.675376 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-fqsdm" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.684770 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-th6r6-config-9jmbs" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.773034 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-combined-ca-bundle\") pod \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\" (UID: \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\") " Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.773136 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2070f204-8f44-4bcb-8082-440aed622fbf-additional-scripts\") pod \"2070f204-8f44-4bcb-8082-440aed622fbf\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.773199 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txcrk\" (UniqueName: \"kubernetes.io/projected/2070f204-8f44-4bcb-8082-440aed622fbf-kube-api-access-txcrk\") pod \"2070f204-8f44-4bcb-8082-440aed622fbf\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.773251 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-log-ovn\") pod \"2070f204-8f44-4bcb-8082-440aed622fbf\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.773269 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-run-ovn\") pod \"2070f204-8f44-4bcb-8082-440aed622fbf\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.773315 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsl79\" (UniqueName: \"kubernetes.io/projected/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-kube-api-access-lsl79\") pod \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\" (UID: \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\") " Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.773331 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-config-data\") pod \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\" (UID: \"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1\") " Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.773363 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-run\") pod \"2070f204-8f44-4bcb-8082-440aed622fbf\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.773426 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2070f204-8f44-4bcb-8082-440aed622fbf-scripts\") pod \"2070f204-8f44-4bcb-8082-440aed622fbf\" (UID: \"2070f204-8f44-4bcb-8082-440aed622fbf\") " Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.774117 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "2070f204-8f44-4bcb-8082-440aed622fbf" (UID: 
"2070f204-8f44-4bcb-8082-440aed622fbf"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.774735 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "2070f204-8f44-4bcb-8082-440aed622fbf" (UID: "2070f204-8f44-4bcb-8082-440aed622fbf"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.774804 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2070f204-8f44-4bcb-8082-440aed622fbf-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "2070f204-8f44-4bcb-8082-440aed622fbf" (UID: "2070f204-8f44-4bcb-8082-440aed622fbf"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.774859 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-run" (OuterVolumeSpecName: "var-run") pod "2070f204-8f44-4bcb-8082-440aed622fbf" (UID: "2070f204-8f44-4bcb-8082-440aed622fbf"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.775048 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2070f204-8f44-4bcb-8082-440aed622fbf-scripts" (OuterVolumeSpecName: "scripts") pod "2070f204-8f44-4bcb-8082-440aed622fbf" (UID: "2070f204-8f44-4bcb-8082-440aed622fbf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.786260 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-kube-api-access-lsl79" (OuterVolumeSpecName: "kube-api-access-lsl79") pod "2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1" (UID: "2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1"). InnerVolumeSpecName "kube-api-access-lsl79". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.786601 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2070f204-8f44-4bcb-8082-440aed622fbf-kube-api-access-txcrk" (OuterVolumeSpecName: "kube-api-access-txcrk") pod "2070f204-8f44-4bcb-8082-440aed622fbf" (UID: "2070f204-8f44-4bcb-8082-440aed622fbf"). InnerVolumeSpecName "kube-api-access-txcrk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.807098 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1" (UID: "2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.841765 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-config-data" (OuterVolumeSpecName: "config-data") pod "2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1" (UID: "2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.875730 4783 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.875772 4783 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.875806 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsl79\" (UniqueName: \"kubernetes.io/projected/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-kube-api-access-lsl79\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.875821 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.875833 4783 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2070f204-8f44-4bcb-8082-440aed622fbf-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.875844 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2070f204-8f44-4bcb-8082-440aed622fbf-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.875854 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.875888 4783 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2070f204-8f44-4bcb-8082-440aed622fbf-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:47 crc kubenswrapper[4783]: I0930 13:53:47.875900 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txcrk\" (UniqueName: \"kubernetes.io/projected/2070f204-8f44-4bcb-8082-440aed622fbf-kube-api-access-txcrk\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.294136 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-th6r6-config-9jmbs" event={"ID":"2070f204-8f44-4bcb-8082-440aed622fbf","Type":"ContainerDied","Data":"3330d8fe05c885ba215ebd72777bfc52677ce62dfab3328787084eb49afdb9d8"} Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.294183 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3330d8fe05c885ba215ebd72777bfc52677ce62dfab3328787084eb49afdb9d8" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.294279 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-th6r6-config-9jmbs" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.300067 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fqsdm" event={"ID":"2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1","Type":"ContainerDied","Data":"79f06eddfb7e3add08d42a00c9056ca9eeb2b346daeba598bf625d3b63b2351d"} Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.300117 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79f06eddfb7e3add08d42a00c9056ca9eeb2b346daeba598bf625d3b63b2351d" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.300171 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-fqsdm" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.617737 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-fpfgx"] Sep 30 13:53:48 crc kubenswrapper[4783]: E0930 13:53:48.618499 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2070f204-8f44-4bcb-8082-440aed622fbf" containerName="ovn-config" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.618522 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2070f204-8f44-4bcb-8082-440aed622fbf" containerName="ovn-config" Sep 30 13:53:48 crc kubenswrapper[4783]: E0930 13:53:48.618546 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="011eca05-b58b-4412-b0d8-3700bb26099b" containerName="mariadb-account-create" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.618556 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="011eca05-b58b-4412-b0d8-3700bb26099b" containerName="mariadb-account-create" Sep 30 13:53:48 crc kubenswrapper[4783]: E0930 13:53:48.618570 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01ac8973-fc47-4f89-86d2-b973ef33a21d" containerName="mariadb-account-create" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.618577 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="01ac8973-fc47-4f89-86d2-b973ef33a21d" containerName="mariadb-account-create" Sep 30 13:53:48 crc kubenswrapper[4783]: E0930 13:53:48.618588 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25530b76-7a63-45b8-b096-492a37a7237d" containerName="mariadb-account-create" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.618594 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="25530b76-7a63-45b8-b096-492a37a7237d" containerName="mariadb-account-create" Sep 30 13:53:48 crc kubenswrapper[4783]: E0930 13:53:48.618604 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1" containerName="keystone-db-sync" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.618611 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1" containerName="keystone-db-sync" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.618800 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="011eca05-b58b-4412-b0d8-3700bb26099b" containerName="mariadb-account-create" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.618817 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="01ac8973-fc47-4f89-86d2-b973ef33a21d" containerName="mariadb-account-create" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.618830 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2070f204-8f44-4bcb-8082-440aed622fbf" 
containerName="ovn-config" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.618851 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="25530b76-7a63-45b8-b096-492a37a7237d" containerName="mariadb-account-create" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.618865 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1" containerName="keystone-db-sync" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.619529 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-fpfgx" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.623512 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.623681 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.623704 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.623681 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-grwfp" Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.632272 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cfbb96789-5wdf6"] Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.632540 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" podUID="45bc7e76-3536-419f-8f07-6b4c4554295e" containerName="dnsmasq-dns" containerID="cri-o://5b8aebbea0ba5437f3d1d08ac1e4152d4d2911f0fa34ed702a4365b795281ad0" gracePeriod=10 Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.648586 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-fpfgx"] Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.694122 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f4777664c-hz64m"] Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.701237 4783 util.go:30] "No sandbox for pod can be found. 
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.715066 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f4777664c-hz64m"]
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.811471 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-fernet-keys\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.811540 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-config\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.811573 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-dns-svc\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.811599 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-ovsdbserver-sb\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.811662 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-combined-ca-bundle\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.811705 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpkjr\" (UniqueName: \"kubernetes.io/projected/c7088b96-6490-476f-ae28-86e6a4224f8f-kube-api-access-mpkjr\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.811749 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-ovsdbserver-nb\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.811770 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-dns-swift-storage-0\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.811800 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-credential-keys\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.811856 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-scripts\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.811901 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-config-data\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.811927 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmdf5\" (UniqueName: \"kubernetes.io/projected/690caf75-eda1-4d7d-abaf-1d32a96d50b2-kube-api-access-bmdf5\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.882642 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-th6r6-config-9jmbs"]
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.894288 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-th6r6-config-9jmbs"]
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.908181 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.913306 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.913494 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-fernet-keys\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.913584 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-config\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.913672 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-dns-svc\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.913736 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-ovsdbserver-sb\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.913824 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-combined-ca-bundle\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.913904 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpkjr\" (UniqueName: \"kubernetes.io/projected/c7088b96-6490-476f-ae28-86e6a4224f8f-kube-api-access-mpkjr\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.914016 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-ovsdbserver-nb\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.914087 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-dns-swift-storage-0\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.914155 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-credential-keys\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.914258 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-scripts\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.914337 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-config-data\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.914400 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmdf5\" (UniqueName: \"kubernetes.io/projected/690caf75-eda1-4d7d-abaf-1d32a96d50b2-kube-api-access-bmdf5\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.915503 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-dns-swift-storage-0\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.915627 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.916029 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-ovsdbserver-nb\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.916678 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-config\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.917088 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-ovsdbserver-sb\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.918324 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-dns-svc\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.919212 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.920782 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.921849 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-scripts\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.924699 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-credential-keys\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.926583 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-config-data\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.928821 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-combined-ca-bundle\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.948306 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpkjr\" (UniqueName: \"kubernetes.io/projected/c7088b96-6490-476f-ae28-86e6a4224f8f-kube-api-access-mpkjr\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.949162 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-fernet-keys\") pod \"keystone-bootstrap-fpfgx\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " pod="openstack/keystone-bootstrap-fpfgx"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.961928 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmdf5\" (UniqueName: \"kubernetes.io/projected/690caf75-eda1-4d7d-abaf-1d32a96d50b2-kube-api-access-bmdf5\") pod \"dnsmasq-dns-7f4777664c-hz64m\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.966441 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-th9hn"]
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.967734 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-th9hn"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.974285 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f4777664c-hz64m"]
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.975039 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f4777664c-hz64m"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.975605 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.975758 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Sep 30 13:53:48 crc kubenswrapper[4783]: I0930 13:53:48.976367 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-2tgtm"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.013083 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-th9hn"]
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.018395 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-scripts\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.018870 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.019269 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8257f606-4b1c-46e2-918e-9ebf1128f6cc-log-httpd\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.019360 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.019528 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-config-data\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.019649 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8257f606-4b1c-46e2-918e-9ebf1128f6cc-run-httpd\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.019714 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2wr5\" (UniqueName: \"kubernetes.io/projected/8257f606-4b1c-46e2-918e-9ebf1128f6cc-kube-api-access-j2wr5\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.019808 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b4dc449d9-9rtzr"]
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.021206 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.108331 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b4dc449d9-9rtzr"]
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.121789 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8257f606-4b1c-46e2-918e-9ebf1128f6cc-run-httpd\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.121862 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-scripts\") pod \"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.121885 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2wr5\" (UniqueName: \"kubernetes.io/projected/8257f606-4b1c-46e2-918e-9ebf1128f6cc-kube-api-access-j2wr5\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.121911 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvwqk\" (UniqueName: \"kubernetes.io/projected/f7b8982d-c79a-470e-a0b5-1a8c2e299993-kube-api-access-zvwqk\") pod \"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.121953 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-scripts\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.121993 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-config-data\") pod \"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.122034 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-combined-ca-bundle\") pod \"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.122058 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.122073 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8257f606-4b1c-46e2-918e-9ebf1128f6cc-log-httpd\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.122093 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-ovsdbserver-sb\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.122113 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.122131 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shbwc\" (UniqueName: \"kubernetes.io/projected/12cd9ff8-532c-4537-bd17-ac9525578d46-kube-api-access-shbwc\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.122157 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-config-data\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.122178 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7b8982d-c79a-470e-a0b5-1a8c2e299993-logs\") pod \"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.122196 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-ovsdbserver-nb\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.122231 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-dns-swift-storage-0\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.122250 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-config\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr"
Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.122266 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-dns-svc\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr"
\"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.122263 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8257f606-4b1c-46e2-918e-9ebf1128f6cc-run-httpd\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.122439 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8257f606-4b1c-46e2-918e-9ebf1128f6cc-log-httpd\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.125925 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-scripts\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.126820 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.127158 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-config-data\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.127838 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.137679 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2wr5\" (UniqueName: \"kubernetes.io/projected/8257f606-4b1c-46e2-918e-9ebf1128f6cc-kube-api-access-j2wr5\") pod \"ceilometer-0\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " pod="openstack/ceilometer-0" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.223666 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-ovsdbserver-sb\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.223737 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shbwc\" (UniqueName: \"kubernetes.io/projected/12cd9ff8-532c-4537-bd17-ac9525578d46-kube-api-access-shbwc\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.223800 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7b8982d-c79a-470e-a0b5-1a8c2e299993-logs\") pod 
\"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.223828 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-ovsdbserver-nb\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.223855 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-dns-swift-storage-0\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.224041 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-config\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.224071 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-dns-svc\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.224118 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-scripts\") pod \"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.224164 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvwqk\" (UniqueName: \"kubernetes.io/projected/f7b8982d-c79a-470e-a0b5-1a8c2e299993-kube-api-access-zvwqk\") pod \"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.224322 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-config-data\") pod \"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.224384 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-combined-ca-bundle\") pod \"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.224521 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7b8982d-c79a-470e-a0b5-1a8c2e299993-logs\") pod \"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 
13:53:49.228958 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-dns-swift-storage-0\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.229454 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-dns-svc\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.230193 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-ovsdbserver-sb\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.230585 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-ovsdbserver-nb\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.230755 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-config\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.231047 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-config-data\") pod \"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.232602 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-combined-ca-bundle\") pod \"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.243354 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-scripts\") pod \"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.247498 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shbwc\" (UniqueName: \"kubernetes.io/projected/12cd9ff8-532c-4537-bd17-ac9525578d46-kube-api-access-shbwc\") pod \"dnsmasq-dns-b4dc449d9-9rtzr\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.247963 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-fpfgx" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.248622 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvwqk\" (UniqueName: \"kubernetes.io/projected/f7b8982d-c79a-470e-a0b5-1a8c2e299993-kube-api-access-zvwqk\") pod \"placement-db-sync-th9hn\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") " pod="openstack/placement-db-sync-th9hn" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.262122 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.355590 4783 generic.go:334] "Generic (PLEG): container finished" podID="45bc7e76-3536-419f-8f07-6b4c4554295e" containerID="5b8aebbea0ba5437f3d1d08ac1e4152d4d2911f0fa34ed702a4365b795281ad0" exitCode=0 Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.355640 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" event={"ID":"45bc7e76-3536-419f-8f07-6b4c4554295e","Type":"ContainerDied","Data":"5b8aebbea0ba5437f3d1d08ac1e4152d4d2911f0fa34ed702a4365b795281ad0"} Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.355670 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" event={"ID":"45bc7e76-3536-419f-8f07-6b4c4554295e","Type":"ContainerDied","Data":"e2e835537f9ef248d587b4270d8d88c3e43f6830842fcf2c549035a96880108f"} Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.355690 4783 scope.go:117] "RemoveContainer" containerID="5b8aebbea0ba5437f3d1d08ac1e4152d4d2911f0fa34ed702a4365b795281ad0" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.355887 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cfbb96789-5wdf6" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.371322 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.385539 4783 scope.go:117] "RemoveContainer" containerID="ec99d461f917862f91ddba219ecc1db525e7f7eb7b6cae8d055578ca5a1c0c4b" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.387923 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-th9hn" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.395533 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.426674 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-ovsdbserver-sb\") pod \"45bc7e76-3536-419f-8f07-6b4c4554295e\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.426725 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-ovsdbserver-nb\") pod \"45bc7e76-3536-419f-8f07-6b4c4554295e\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.426773 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-dns-svc\") pod \"45bc7e76-3536-419f-8f07-6b4c4554295e\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.426888 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-dns-swift-storage-0\") pod \"45bc7e76-3536-419f-8f07-6b4c4554295e\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.426973 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2r6k\" (UniqueName: \"kubernetes.io/projected/45bc7e76-3536-419f-8f07-6b4c4554295e-kube-api-access-m2r6k\") pod \"45bc7e76-3536-419f-8f07-6b4c4554295e\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.426989 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-config\") pod \"45bc7e76-3536-419f-8f07-6b4c4554295e\" (UID: \"45bc7e76-3536-419f-8f07-6b4c4554295e\") " Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.432008 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45bc7e76-3536-419f-8f07-6b4c4554295e-kube-api-access-m2r6k" (OuterVolumeSpecName: "kube-api-access-m2r6k") pod "45bc7e76-3536-419f-8f07-6b4c4554295e" (UID: "45bc7e76-3536-419f-8f07-6b4c4554295e"). InnerVolumeSpecName "kube-api-access-m2r6k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.510916 4783 scope.go:117] "RemoveContainer" containerID="5b8aebbea0ba5437f3d1d08ac1e4152d4d2911f0fa34ed702a4365b795281ad0" Sep 30 13:53:49 crc kubenswrapper[4783]: E0930 13:53:49.512003 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b8aebbea0ba5437f3d1d08ac1e4152d4d2911f0fa34ed702a4365b795281ad0\": container with ID starting with 5b8aebbea0ba5437f3d1d08ac1e4152d4d2911f0fa34ed702a4365b795281ad0 not found: ID does not exist" containerID="5b8aebbea0ba5437f3d1d08ac1e4152d4d2911f0fa34ed702a4365b795281ad0" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.512050 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b8aebbea0ba5437f3d1d08ac1e4152d4d2911f0fa34ed702a4365b795281ad0"} err="failed to get container status \"5b8aebbea0ba5437f3d1d08ac1e4152d4d2911f0fa34ed702a4365b795281ad0\": rpc error: code = NotFound desc = could not find container \"5b8aebbea0ba5437f3d1d08ac1e4152d4d2911f0fa34ed702a4365b795281ad0\": container with ID starting with 5b8aebbea0ba5437f3d1d08ac1e4152d4d2911f0fa34ed702a4365b795281ad0 not found: ID does not exist" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.512079 4783 scope.go:117] "RemoveContainer" containerID="ec99d461f917862f91ddba219ecc1db525e7f7eb7b6cae8d055578ca5a1c0c4b" Sep 30 13:53:49 crc kubenswrapper[4783]: E0930 13:53:49.517025 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec99d461f917862f91ddba219ecc1db525e7f7eb7b6cae8d055578ca5a1c0c4b\": container with ID starting with ec99d461f917862f91ddba219ecc1db525e7f7eb7b6cae8d055578ca5a1c0c4b not found: ID does not exist" containerID="ec99d461f917862f91ddba219ecc1db525e7f7eb7b6cae8d055578ca5a1c0c4b" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.517060 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec99d461f917862f91ddba219ecc1db525e7f7eb7b6cae8d055578ca5a1c0c4b"} err="failed to get container status \"ec99d461f917862f91ddba219ecc1db525e7f7eb7b6cae8d055578ca5a1c0c4b\": rpc error: code = NotFound desc = could not find container \"ec99d461f917862f91ddba219ecc1db525e7f7eb7b6cae8d055578ca5a1c0c4b\": container with ID starting with ec99d461f917862f91ddba219ecc1db525e7f7eb7b6cae8d055578ca5a1c0c4b not found: ID does not exist" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.533245 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2r6k\" (UniqueName: \"kubernetes.io/projected/45bc7e76-3536-419f-8f07-6b4c4554295e-kube-api-access-m2r6k\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.534782 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f4777664c-hz64m"] Sep 30 13:53:49 crc kubenswrapper[4783]: W0930 13:53:49.585859 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod690caf75_eda1_4d7d_abaf_1d32a96d50b2.slice/crio-0602afbaa1a45466103da6d888a7b6379176efd08e81ba03aba5fa788a1b053a WatchSource:0}: Error finding container 0602afbaa1a45466103da6d888a7b6379176efd08e81ba03aba5fa788a1b053a: Status 404 returned error can't find the container with id 0602afbaa1a45466103da6d888a7b6379176efd08e81ba03aba5fa788a1b053a Sep 30 13:53:49 crc 
kubenswrapper[4783]: I0930 13:53:49.587642 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "45bc7e76-3536-419f-8f07-6b4c4554295e" (UID: "45bc7e76-3536-419f-8f07-6b4c4554295e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.590937 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "45bc7e76-3536-419f-8f07-6b4c4554295e" (UID: "45bc7e76-3536-419f-8f07-6b4c4554295e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.598088 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "45bc7e76-3536-419f-8f07-6b4c4554295e" (UID: "45bc7e76-3536-419f-8f07-6b4c4554295e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.604931 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-config" (OuterVolumeSpecName: "config") pod "45bc7e76-3536-419f-8f07-6b4c4554295e" (UID: "45bc7e76-3536-419f-8f07-6b4c4554295e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.610329 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "45bc7e76-3536-419f-8f07-6b4c4554295e" (UID: "45bc7e76-3536-419f-8f07-6b4c4554295e"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.634923 4783 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.634956 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.634968 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.634977 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.634985 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45bc7e76-3536-419f-8f07-6b4c4554295e-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.726198 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cfbb96789-5wdf6"] Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.737817 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-fpfgx"] Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.744438 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6cfbb96789-5wdf6"] Sep 30 13:53:49 crc kubenswrapper[4783]: I0930 13:53:49.862858 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b4dc449d9-9rtzr"] Sep 30 13:53:49 crc kubenswrapper[4783]: W0930 13:53:49.874627 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12cd9ff8_532c_4537_bd17_ac9525578d46.slice/crio-64b9568739bb968d48d3200b4bee670deda1d53283f4d62a75a37a56fce89e81 WatchSource:0}: Error finding container 64b9568739bb968d48d3200b4bee670deda1d53283f4d62a75a37a56fce89e81: Status 404 returned error can't find the container with id 64b9568739bb968d48d3200b4bee670deda1d53283f4d62a75a37a56fce89e81 Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.011568 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:53:50 crc kubenswrapper[4783]: W0930 13:53:50.034067 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8257f606_4b1c_46e2_918e_9ebf1128f6cc.slice/crio-9e11ccd72f03979cdbd68b458335dbbd2a0cf5c9797b4572a675ade551bf61f5 WatchSource:0}: Error finding container 9e11ccd72f03979cdbd68b458335dbbd2a0cf5c9797b4572a675ade551bf61f5: Status 404 returned error can't find the container with id 9e11ccd72f03979cdbd68b458335dbbd2a0cf5c9797b4572a675ade551bf61f5 Sep 30 13:53:50 crc kubenswrapper[4783]: W0930 13:53:50.111826 4783 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7b8982d_c79a_470e_a0b5_1a8c2e299993.slice/crio-f19167a4e80317d2c4ecfd6e41a50d3e1ed7387f52e961a983a22c2623a73b5a WatchSource:0}: Error finding container f19167a4e80317d2c4ecfd6e41a50d3e1ed7387f52e961a983a22c2623a73b5a: Status 404 returned error can't find the container with id f19167a4e80317d2c4ecfd6e41a50d3e1ed7387f52e961a983a22c2623a73b5a Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.112353 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-th9hn"] Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.364575 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8257f606-4b1c-46e2-918e-9ebf1128f6cc","Type":"ContainerStarted","Data":"9e11ccd72f03979cdbd68b458335dbbd2a0cf5c9797b4572a675ade551bf61f5"} Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.367043 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-th9hn" event={"ID":"f7b8982d-c79a-470e-a0b5-1a8c2e299993","Type":"ContainerStarted","Data":"f19167a4e80317d2c4ecfd6e41a50d3e1ed7387f52e961a983a22c2623a73b5a"} Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.368981 4783 generic.go:334] "Generic (PLEG): container finished" podID="12cd9ff8-532c-4537-bd17-ac9525578d46" containerID="eff68e2a5bb74a237438e3f6b6df0840c3948f12c8278800279202a74df218e0" exitCode=0 Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.369101 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" event={"ID":"12cd9ff8-532c-4537-bd17-ac9525578d46","Type":"ContainerDied","Data":"eff68e2a5bb74a237438e3f6b6df0840c3948f12c8278800279202a74df218e0"} Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.369169 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" event={"ID":"12cd9ff8-532c-4537-bd17-ac9525578d46","Type":"ContainerStarted","Data":"64b9568739bb968d48d3200b4bee670deda1d53283f4d62a75a37a56fce89e81"} Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.370846 4783 generic.go:334] "Generic (PLEG): container finished" podID="690caf75-eda1-4d7d-abaf-1d32a96d50b2" containerID="3ac449586b848ab647fe4a0a89aaef4122fa3cb2bc5e7ac12cf7ee8a119474fb" exitCode=0 Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.370921 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f4777664c-hz64m" event={"ID":"690caf75-eda1-4d7d-abaf-1d32a96d50b2","Type":"ContainerDied","Data":"3ac449586b848ab647fe4a0a89aaef4122fa3cb2bc5e7ac12cf7ee8a119474fb"} Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.370952 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f4777664c-hz64m" event={"ID":"690caf75-eda1-4d7d-abaf-1d32a96d50b2","Type":"ContainerStarted","Data":"0602afbaa1a45466103da6d888a7b6379176efd08e81ba03aba5fa788a1b053a"} Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.372495 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fpfgx" event={"ID":"c7088b96-6490-476f-ae28-86e6a4224f8f","Type":"ContainerStarted","Data":"26f2653ce8c6fa111aefaee7376c8de9973a390c28f5166fee1bda8b0fa25259"} Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.372530 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fpfgx" 
event={"ID":"c7088b96-6490-476f-ae28-86e6a4224f8f","Type":"ContainerStarted","Data":"35a4d2b6dadd19844e558b8f134cffadaa57639fb93cbe483d0823cc3ea3f1ee"} Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.512117 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-fpfgx" podStartSLOduration=2.512093198 podStartE2EDuration="2.512093198s" podCreationTimestamp="2025-09-30 13:53:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:53:50.444076338 +0000 UTC m=+1130.375542655" watchObservedRunningTime="2025-09-30 13:53:50.512093198 +0000 UTC m=+1130.443559505" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.748247 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f4777664c-hz64m" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.854428 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2070f204-8f44-4bcb-8082-440aed622fbf" path="/var/lib/kubelet/pods/2070f204-8f44-4bcb-8082-440aed622fbf/volumes" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.855067 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45bc7e76-3536-419f-8f07-6b4c4554295e" path="/var/lib/kubelet/pods/45bc7e76-3536-419f-8f07-6b4c4554295e/volumes" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.861356 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-dns-swift-storage-0\") pod \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.862085 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-ovsdbserver-sb\") pod \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.862189 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-ovsdbserver-nb\") pod \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.862317 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-config\") pod \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.862416 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-dns-svc\") pod \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.862509 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmdf5\" (UniqueName: \"kubernetes.io/projected/690caf75-eda1-4d7d-abaf-1d32a96d50b2-kube-api-access-bmdf5\") pod \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\" (UID: \"690caf75-eda1-4d7d-abaf-1d32a96d50b2\") " Sep 30 13:53:50 crc 
kubenswrapper[4783]: I0930 13:53:50.876391 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/690caf75-eda1-4d7d-abaf-1d32a96d50b2-kube-api-access-bmdf5" (OuterVolumeSpecName: "kube-api-access-bmdf5") pod "690caf75-eda1-4d7d-abaf-1d32a96d50b2" (UID: "690caf75-eda1-4d7d-abaf-1d32a96d50b2"). InnerVolumeSpecName "kube-api-access-bmdf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.888776 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-config" (OuterVolumeSpecName: "config") pod "690caf75-eda1-4d7d-abaf-1d32a96d50b2" (UID: "690caf75-eda1-4d7d-abaf-1d32a96d50b2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.889986 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "690caf75-eda1-4d7d-abaf-1d32a96d50b2" (UID: "690caf75-eda1-4d7d-abaf-1d32a96d50b2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.895574 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "690caf75-eda1-4d7d-abaf-1d32a96d50b2" (UID: "690caf75-eda1-4d7d-abaf-1d32a96d50b2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.901046 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "690caf75-eda1-4d7d-abaf-1d32a96d50b2" (UID: "690caf75-eda1-4d7d-abaf-1d32a96d50b2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.905848 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "690caf75-eda1-4d7d-abaf-1d32a96d50b2" (UID: "690caf75-eda1-4d7d-abaf-1d32a96d50b2"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.964687 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmdf5\" (UniqueName: \"kubernetes.io/projected/690caf75-eda1-4d7d-abaf-1d32a96d50b2-kube-api-access-bmdf5\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.964722 4783 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.964737 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.964747 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.964758 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:50 crc kubenswrapper[4783]: I0930 13:53:50.964771 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/690caf75-eda1-4d7d-abaf-1d32a96d50b2-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:51 crc kubenswrapper[4783]: I0930 13:53:51.262140 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:53:51 crc kubenswrapper[4783]: I0930 13:53:51.384785 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f4777664c-hz64m" event={"ID":"690caf75-eda1-4d7d-abaf-1d32a96d50b2","Type":"ContainerDied","Data":"0602afbaa1a45466103da6d888a7b6379176efd08e81ba03aba5fa788a1b053a"} Sep 30 13:53:51 crc kubenswrapper[4783]: I0930 13:53:51.384843 4783 scope.go:117] "RemoveContainer" containerID="3ac449586b848ab647fe4a0a89aaef4122fa3cb2bc5e7ac12cf7ee8a119474fb" Sep 30 13:53:51 crc kubenswrapper[4783]: I0930 13:53:51.384979 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f4777664c-hz64m" Sep 30 13:53:51 crc kubenswrapper[4783]: I0930 13:53:51.391590 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" event={"ID":"12cd9ff8-532c-4537-bd17-ac9525578d46","Type":"ContainerStarted","Data":"d6429eb0c00b98546e48a36a7836dc511a86fdc3e9786896f8cd134aebd1eb90"} Sep 30 13:53:51 crc kubenswrapper[4783]: I0930 13:53:51.391770 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:51 crc kubenswrapper[4783]: I0930 13:53:51.414454 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" podStartSLOduration=3.414437994 podStartE2EDuration="3.414437994s" podCreationTimestamp="2025-09-30 13:53:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:53:51.406235001 +0000 UTC m=+1131.337701308" watchObservedRunningTime="2025-09-30 13:53:51.414437994 +0000 UTC m=+1131.345904301" Sep 30 13:53:51 crc kubenswrapper[4783]: I0930 13:53:51.456787 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f4777664c-hz64m"] Sep 30 13:53:51 crc kubenswrapper[4783]: I0930 13:53:51.467536 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f4777664c-hz64m"] Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.063692 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-qcbqm"] Sep 30 13:53:52 crc kubenswrapper[4783]: E0930 13:53:52.064067 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45bc7e76-3536-419f-8f07-6b4c4554295e" containerName="dnsmasq-dns" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.064084 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="45bc7e76-3536-419f-8f07-6b4c4554295e" containerName="dnsmasq-dns" Sep 30 13:53:52 crc kubenswrapper[4783]: E0930 13:53:52.064105 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="690caf75-eda1-4d7d-abaf-1d32a96d50b2" containerName="init" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.064112 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="690caf75-eda1-4d7d-abaf-1d32a96d50b2" containerName="init" Sep 30 13:53:52 crc kubenswrapper[4783]: E0930 13:53:52.064121 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45bc7e76-3536-419f-8f07-6b4c4554295e" containerName="init" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.064127 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="45bc7e76-3536-419f-8f07-6b4c4554295e" containerName="init" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.064323 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="690caf75-eda1-4d7d-abaf-1d32a96d50b2" containerName="init" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.064339 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="45bc7e76-3536-419f-8f07-6b4c4554295e" containerName="dnsmasq-dns" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.064869 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.067539 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-78bp2" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.071362 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.071532 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.081671 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-qcbqm"] Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.192500 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-combined-ca-bundle\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.192563 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-db-sync-config-data\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.192596 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04087483-d2dd-4f70-99f1-592a46394263-etc-machine-id\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.192618 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmkfs\" (UniqueName: \"kubernetes.io/projected/04087483-d2dd-4f70-99f1-592a46394263-kube-api-access-cmkfs\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.192641 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-config-data\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.192690 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-scripts\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.294120 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-combined-ca-bundle\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.294187 4783 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-db-sync-config-data\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.294214 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04087483-d2dd-4f70-99f1-592a46394263-etc-machine-id\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.294250 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmkfs\" (UniqueName: \"kubernetes.io/projected/04087483-d2dd-4f70-99f1-592a46394263-kube-api-access-cmkfs\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.294275 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-config-data\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.294323 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-scripts\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.295135 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04087483-d2dd-4f70-99f1-592a46394263-etc-machine-id\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.307201 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-config-data\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.307777 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-db-sync-config-data\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.309818 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-scripts\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.324548 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-combined-ca-bundle\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " 
pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.329837 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmkfs\" (UniqueName: \"kubernetes.io/projected/04087483-d2dd-4f70-99f1-592a46394263-kube-api-access-cmkfs\") pod \"cinder-db-sync-qcbqm\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.388985 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.450777 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-dp2b8"] Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.451942 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-dp2b8" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.455033 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.455299 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-wckjk" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.462011 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-dp2b8"] Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.598824 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-db-sync-config-data\") pod \"barbican-db-sync-dp2b8\" (UID: \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\") " pod="openstack/barbican-db-sync-dp2b8" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.598891 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-combined-ca-bundle\") pod \"barbican-db-sync-dp2b8\" (UID: \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\") " pod="openstack/barbican-db-sync-dp2b8" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.598982 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlj4l\" (UniqueName: \"kubernetes.io/projected/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-kube-api-access-zlj4l\") pod \"barbican-db-sync-dp2b8\" (UID: \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\") " pod="openstack/barbican-db-sync-dp2b8" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.647881 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-wvfc5"] Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.648923 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-wvfc5" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.651028 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-ls4mg" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.651065 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.651136 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.708976 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-wvfc5"] Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.710904 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-combined-ca-bundle\") pod \"barbican-db-sync-dp2b8\" (UID: \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\") " pod="openstack/barbican-db-sync-dp2b8" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.711085 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlj4l\" (UniqueName: \"kubernetes.io/projected/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-kube-api-access-zlj4l\") pod \"barbican-db-sync-dp2b8\" (UID: \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\") " pod="openstack/barbican-db-sync-dp2b8" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.711151 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-db-sync-config-data\") pod \"barbican-db-sync-dp2b8\" (UID: \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\") " pod="openstack/barbican-db-sync-dp2b8" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.729873 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-combined-ca-bundle\") pod \"barbican-db-sync-dp2b8\" (UID: \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\") " pod="openstack/barbican-db-sync-dp2b8" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.730658 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-db-sync-config-data\") pod \"barbican-db-sync-dp2b8\" (UID: \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\") " pod="openstack/barbican-db-sync-dp2b8" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.737579 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlj4l\" (UniqueName: \"kubernetes.io/projected/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-kube-api-access-zlj4l\") pod \"barbican-db-sync-dp2b8\" (UID: \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\") " pod="openstack/barbican-db-sync-dp2b8" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.778197 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-dp2b8" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.813258 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/99edfb42-ed13-471e-8e93-62ccafc5b190-config\") pod \"neutron-db-sync-wvfc5\" (UID: \"99edfb42-ed13-471e-8e93-62ccafc5b190\") " pod="openstack/neutron-db-sync-wvfc5" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.813345 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99edfb42-ed13-471e-8e93-62ccafc5b190-combined-ca-bundle\") pod \"neutron-db-sync-wvfc5\" (UID: \"99edfb42-ed13-471e-8e93-62ccafc5b190\") " pod="openstack/neutron-db-sync-wvfc5" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.813711 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9p5r\" (UniqueName: \"kubernetes.io/projected/99edfb42-ed13-471e-8e93-62ccafc5b190-kube-api-access-g9p5r\") pod \"neutron-db-sync-wvfc5\" (UID: \"99edfb42-ed13-471e-8e93-62ccafc5b190\") " pod="openstack/neutron-db-sync-wvfc5" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.859791 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="690caf75-eda1-4d7d-abaf-1d32a96d50b2" path="/var/lib/kubelet/pods/690caf75-eda1-4d7d-abaf-1d32a96d50b2/volumes" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.914988 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9p5r\" (UniqueName: \"kubernetes.io/projected/99edfb42-ed13-471e-8e93-62ccafc5b190-kube-api-access-g9p5r\") pod \"neutron-db-sync-wvfc5\" (UID: \"99edfb42-ed13-471e-8e93-62ccafc5b190\") " pod="openstack/neutron-db-sync-wvfc5" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.915038 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/99edfb42-ed13-471e-8e93-62ccafc5b190-config\") pod \"neutron-db-sync-wvfc5\" (UID: \"99edfb42-ed13-471e-8e93-62ccafc5b190\") " pod="openstack/neutron-db-sync-wvfc5" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.915099 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99edfb42-ed13-471e-8e93-62ccafc5b190-combined-ca-bundle\") pod \"neutron-db-sync-wvfc5\" (UID: \"99edfb42-ed13-471e-8e93-62ccafc5b190\") " pod="openstack/neutron-db-sync-wvfc5" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.918314 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99edfb42-ed13-471e-8e93-62ccafc5b190-combined-ca-bundle\") pod \"neutron-db-sync-wvfc5\" (UID: \"99edfb42-ed13-471e-8e93-62ccafc5b190\") " pod="openstack/neutron-db-sync-wvfc5" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.921046 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/99edfb42-ed13-471e-8e93-62ccafc5b190-config\") pod \"neutron-db-sync-wvfc5\" (UID: \"99edfb42-ed13-471e-8e93-62ccafc5b190\") " pod="openstack/neutron-db-sync-wvfc5" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.930434 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9p5r\" (UniqueName: 
\"kubernetes.io/projected/99edfb42-ed13-471e-8e93-62ccafc5b190-kube-api-access-g9p5r\") pod \"neutron-db-sync-wvfc5\" (UID: \"99edfb42-ed13-471e-8e93-62ccafc5b190\") " pod="openstack/neutron-db-sync-wvfc5" Sep 30 13:53:52 crc kubenswrapper[4783]: I0930 13:53:52.971038 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-wvfc5" Sep 30 13:53:53 crc kubenswrapper[4783]: I0930 13:53:53.414834 4783 generic.go:334] "Generic (PLEG): container finished" podID="13707881-f4b3-4fea-b926-3724eb156688" containerID="de7d813759552084e588f18ac7d2fa4049833b8b17f8a47d3bf8b8918e2749e6" exitCode=0 Sep 30 13:53:53 crc kubenswrapper[4783]: I0930 13:53:53.414896 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-mvmd6" event={"ID":"13707881-f4b3-4fea-b926-3724eb156688","Type":"ContainerDied","Data":"de7d813759552084e588f18ac7d2fa4049833b8b17f8a47d3bf8b8918e2749e6"} Sep 30 13:53:54 crc kubenswrapper[4783]: I0930 13:53:54.432091 4783 generic.go:334] "Generic (PLEG): container finished" podID="c7088b96-6490-476f-ae28-86e6a4224f8f" containerID="26f2653ce8c6fa111aefaee7376c8de9973a390c28f5166fee1bda8b0fa25259" exitCode=0 Sep 30 13:53:54 crc kubenswrapper[4783]: I0930 13:53:54.432181 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fpfgx" event={"ID":"c7088b96-6490-476f-ae28-86e6a4224f8f","Type":"ContainerDied","Data":"26f2653ce8c6fa111aefaee7376c8de9973a390c28f5166fee1bda8b0fa25259"} Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.281685 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.288755 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-fpfgx" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.407884 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-combined-ca-bundle\") pod \"c7088b96-6490-476f-ae28-86e6a4224f8f\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.407937 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-combined-ca-bundle\") pod \"13707881-f4b3-4fea-b926-3724eb156688\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.408004 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-fernet-keys\") pod \"c7088b96-6490-476f-ae28-86e6a4224f8f\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.408052 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-config-data\") pod \"13707881-f4b3-4fea-b926-3724eb156688\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.408091 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-scripts\") pod \"c7088b96-6490-476f-ae28-86e6a4224f8f\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.408117 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zdxq\" (UniqueName: \"kubernetes.io/projected/13707881-f4b3-4fea-b926-3724eb156688-kube-api-access-6zdxq\") pod \"13707881-f4b3-4fea-b926-3724eb156688\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.408200 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpkjr\" (UniqueName: \"kubernetes.io/projected/c7088b96-6490-476f-ae28-86e6a4224f8f-kube-api-access-mpkjr\") pod \"c7088b96-6490-476f-ae28-86e6a4224f8f\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.408270 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-config-data\") pod \"c7088b96-6490-476f-ae28-86e6a4224f8f\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.408294 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-credential-keys\") pod \"c7088b96-6490-476f-ae28-86e6a4224f8f\" (UID: \"c7088b96-6490-476f-ae28-86e6a4224f8f\") " Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.408335 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-db-sync-config-data\") pod 
\"13707881-f4b3-4fea-b926-3724eb156688\" (UID: \"13707881-f4b3-4fea-b926-3724eb156688\") " Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.415646 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7088b96-6490-476f-ae28-86e6a4224f8f-kube-api-access-mpkjr" (OuterVolumeSpecName: "kube-api-access-mpkjr") pod "c7088b96-6490-476f-ae28-86e6a4224f8f" (UID: "c7088b96-6490-476f-ae28-86e6a4224f8f"). InnerVolumeSpecName "kube-api-access-mpkjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.415701 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13707881-f4b3-4fea-b926-3724eb156688-kube-api-access-6zdxq" (OuterVolumeSpecName: "kube-api-access-6zdxq") pod "13707881-f4b3-4fea-b926-3724eb156688" (UID: "13707881-f4b3-4fea-b926-3724eb156688"). InnerVolumeSpecName "kube-api-access-6zdxq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.416890 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "13707881-f4b3-4fea-b926-3724eb156688" (UID: "13707881-f4b3-4fea-b926-3724eb156688"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.417476 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "c7088b96-6490-476f-ae28-86e6a4224f8f" (UID: "c7088b96-6490-476f-ae28-86e6a4224f8f"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.420200 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "c7088b96-6490-476f-ae28-86e6a4224f8f" (UID: "c7088b96-6490-476f-ae28-86e6a4224f8f"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.424481 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-scripts" (OuterVolumeSpecName: "scripts") pod "c7088b96-6490-476f-ae28-86e6a4224f8f" (UID: "c7088b96-6490-476f-ae28-86e6a4224f8f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.450473 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13707881-f4b3-4fea-b926-3724eb156688" (UID: "13707881-f4b3-4fea-b926-3724eb156688"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.452858 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c7088b96-6490-476f-ae28-86e6a4224f8f" (UID: "c7088b96-6490-476f-ae28-86e6a4224f8f"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.454094 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fpfgx" event={"ID":"c7088b96-6490-476f-ae28-86e6a4224f8f","Type":"ContainerDied","Data":"35a4d2b6dadd19844e558b8f134cffadaa57639fb93cbe483d0823cc3ea3f1ee"} Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.454130 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35a4d2b6dadd19844e558b8f134cffadaa57639fb93cbe483d0823cc3ea3f1ee" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.454180 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-fpfgx" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.456030 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-mvmd6" event={"ID":"13707881-f4b3-4fea-b926-3724eb156688","Type":"ContainerDied","Data":"77a7098a45a8170a92dd803ff4a21392788224b2bf58b77e1d481a1d33b5be3b"} Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.456061 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77a7098a45a8170a92dd803ff4a21392788224b2bf58b77e1d481a1d33b5be3b" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.456094 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-mvmd6" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.456255 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-config-data" (OuterVolumeSpecName: "config-data") pod "c7088b96-6490-476f-ae28-86e6a4224f8f" (UID: "c7088b96-6490-476f-ae28-86e6a4224f8f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.485580 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-config-data" (OuterVolumeSpecName: "config-data") pod "13707881-f4b3-4fea-b926-3724eb156688" (UID: "13707881-f4b3-4fea-b926-3724eb156688"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.510032 4783 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.510078 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.510095 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zdxq\" (UniqueName: \"kubernetes.io/projected/13707881-f4b3-4fea-b926-3724eb156688-kube-api-access-6zdxq\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.510110 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.510121 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpkjr\" (UniqueName: \"kubernetes.io/projected/c7088b96-6490-476f-ae28-86e6a4224f8f-kube-api-access-mpkjr\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.510132 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.510141 4783 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.510153 4783 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.510164 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7088b96-6490-476f-ae28-86e6a4224f8f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.510175 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13707881-f4b3-4fea-b926-3724eb156688-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.555746 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-fpfgx"] Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.572127 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-fpfgx"] Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.649866 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-b48f7"] Sep 30 13:53:56 crc kubenswrapper[4783]: E0930 13:53:56.650354 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13707881-f4b3-4fea-b926-3724eb156688" containerName="glance-db-sync" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.650379 4783 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="13707881-f4b3-4fea-b926-3724eb156688" containerName="glance-db-sync" Sep 30 13:53:56 crc kubenswrapper[4783]: E0930 13:53:56.650426 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7088b96-6490-476f-ae28-86e6a4224f8f" containerName="keystone-bootstrap" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.650436 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7088b96-6490-476f-ae28-86e6a4224f8f" containerName="keystone-bootstrap" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.650656 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="13707881-f4b3-4fea-b926-3724eb156688" containerName="glance-db-sync" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.650688 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7088b96-6490-476f-ae28-86e6a4224f8f" containerName="keystone-bootstrap" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.651517 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.658626 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-b48f7"] Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.815410 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-config-data\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.815762 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-fernet-keys\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.815797 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-credential-keys\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.815823 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-combined-ca-bundle\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.815857 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-scripts\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.816096 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfwlp\" (UniqueName: \"kubernetes.io/projected/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-kube-api-access-zfwlp\") pod \"keystone-bootstrap-b48f7\" (UID: 
\"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.855784 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7088b96-6490-476f-ae28-86e6a4224f8f" path="/var/lib/kubelet/pods/c7088b96-6490-476f-ae28-86e6a4224f8f/volumes" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.917930 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfwlp\" (UniqueName: \"kubernetes.io/projected/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-kube-api-access-zfwlp\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.918030 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-config-data\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.918078 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-fernet-keys\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.918139 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-credential-keys\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.918185 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-combined-ca-bundle\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.918270 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-scripts\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.923925 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-combined-ca-bundle\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.926039 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-credential-keys\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.929086 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-scripts\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.929547 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-fernet-keys\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.929654 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-config-data\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.935670 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfwlp\" (UniqueName: \"kubernetes.io/projected/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-kube-api-access-zfwlp\") pod \"keystone-bootstrap-b48f7\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:56 crc kubenswrapper[4783]: I0930 13:53:56.980963 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.404587 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-wvfc5"] Sep 30 13:53:57 crc kubenswrapper[4783]: W0930 13:53:57.411180 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99edfb42_ed13_471e_8e93_62ccafc5b190.slice/crio-1b7c42f729b37621a5434eb38128d820a9dd894c1eee42c2a53e328e23d29eec WatchSource:0}: Error finding container 1b7c42f729b37621a5434eb38128d820a9dd894c1eee42c2a53e328e23d29eec: Status 404 returned error can't find the container with id 1b7c42f729b37621a5434eb38128d820a9dd894c1eee42c2a53e328e23d29eec Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.472741 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wvfc5" event={"ID":"99edfb42-ed13-471e-8e93-62ccafc5b190","Type":"ContainerStarted","Data":"1b7c42f729b37621a5434eb38128d820a9dd894c1eee42c2a53e328e23d29eec"} Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.490513 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8257f606-4b1c-46e2-918e-9ebf1128f6cc","Type":"ContainerStarted","Data":"890c5af0166010b924f1827b8ebfc5b1431692aedf4260f23c576544efc94058"} Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.499897 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-th9hn" event={"ID":"f7b8982d-c79a-470e-a0b5-1a8c2e299993","Type":"ContainerStarted","Data":"bfdcaaa26c0aafae7f67df9116fac849120c495a3024bcb87d2ec57cb5988177"} Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.508019 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-qcbqm"] Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.514178 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-dp2b8"] Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.523797 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/placement-db-sync-th9hn" podStartSLOduration=2.641004866 podStartE2EDuration="9.523782302s" podCreationTimestamp="2025-09-30 13:53:48 +0000 UTC" firstStartedPulling="2025-09-30 13:53:50.114936233 +0000 UTC m=+1130.046402540" lastFinishedPulling="2025-09-30 13:53:56.997713669 +0000 UTC m=+1136.929179976" observedRunningTime="2025-09-30 13:53:57.519601478 +0000 UTC m=+1137.451067795" watchObservedRunningTime="2025-09-30 13:53:57.523782302 +0000 UTC m=+1137.455248609" Sep 30 13:53:57 crc kubenswrapper[4783]: W0930 13:53:57.533295 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc8ef4c24_7507_41c1_aa3c_4e2a9b8f6672.slice/crio-02ab9308f33292014be67b055763857cf95d31e2dee0e974c113876e2ed02c9f WatchSource:0}: Error finding container 02ab9308f33292014be67b055763857cf95d31e2dee0e974c113876e2ed02c9f: Status 404 returned error can't find the container with id 02ab9308f33292014be67b055763857cf95d31e2dee0e974c113876e2ed02c9f Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.595991 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-b48f7"] Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.684335 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b4dc449d9-9rtzr"] Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.684554 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" podUID="12cd9ff8-532c-4537-bd17-ac9525578d46" containerName="dnsmasq-dns" containerID="cri-o://d6429eb0c00b98546e48a36a7836dc511a86fdc3e9786896f8cd134aebd1eb90" gracePeriod=10 Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.686406 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.734440 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5674f66f87-q5c7d"] Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.736411 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.796815 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5674f66f87-q5c7d"] Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.846096 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-dns-swift-storage-0\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.846380 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-ovsdbserver-nb\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.846407 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-config\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.846430 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-ovsdbserver-sb\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.846465 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-dns-svc\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.846488 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-586sq\" (UniqueName: \"kubernetes.io/projected/435b0af3-e4bc-417f-a517-03c12786cee4-kube-api-access-586sq\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.947678 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-dns-swift-storage-0\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.947744 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-ovsdbserver-nb\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.947767 4783 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-config\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.947789 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-ovsdbserver-sb\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.947830 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-dns-svc\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.947853 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-586sq\" (UniqueName: \"kubernetes.io/projected/435b0af3-e4bc-417f-a517-03c12786cee4-kube-api-access-586sq\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.948843 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-config\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.948977 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-ovsdbserver-sb\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.949020 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-ovsdbserver-nb\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.949259 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-dns-svc\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.950181 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-dns-swift-storage-0\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:57 crc kubenswrapper[4783]: I0930 13:53:57.966466 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-586sq\" (UniqueName: 
\"kubernetes.io/projected/435b0af3-e4bc-417f-a517-03c12786cee4-kube-api-access-586sq\") pod \"dnsmasq-dns-5674f66f87-q5c7d\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.056238 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.093149 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.253077 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-dns-swift-storage-0\") pod \"12cd9ff8-532c-4537-bd17-ac9525578d46\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.253461 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-ovsdbserver-nb\") pod \"12cd9ff8-532c-4537-bd17-ac9525578d46\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.253510 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shbwc\" (UniqueName: \"kubernetes.io/projected/12cd9ff8-532c-4537-bd17-ac9525578d46-kube-api-access-shbwc\") pod \"12cd9ff8-532c-4537-bd17-ac9525578d46\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.253546 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-dns-svc\") pod \"12cd9ff8-532c-4537-bd17-ac9525578d46\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.253574 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-ovsdbserver-sb\") pod \"12cd9ff8-532c-4537-bd17-ac9525578d46\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.253613 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-config\") pod \"12cd9ff8-532c-4537-bd17-ac9525578d46\" (UID: \"12cd9ff8-532c-4537-bd17-ac9525578d46\") " Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.297659 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12cd9ff8-532c-4537-bd17-ac9525578d46-kube-api-access-shbwc" (OuterVolumeSpecName: "kube-api-access-shbwc") pod "12cd9ff8-532c-4537-bd17-ac9525578d46" (UID: "12cd9ff8-532c-4537-bd17-ac9525578d46"). InnerVolumeSpecName "kube-api-access-shbwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.348488 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "12cd9ff8-532c-4537-bd17-ac9525578d46" (UID: "12cd9ff8-532c-4537-bd17-ac9525578d46"). 
InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.356291 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.356330 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shbwc\" (UniqueName: \"kubernetes.io/projected/12cd9ff8-532c-4537-bd17-ac9525578d46-kube-api-access-shbwc\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.380149 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "12cd9ff8-532c-4537-bd17-ac9525578d46" (UID: "12cd9ff8-532c-4537-bd17-ac9525578d46"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.386039 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "12cd9ff8-532c-4537-bd17-ac9525578d46" (UID: "12cd9ff8-532c-4537-bd17-ac9525578d46"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.427010 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "12cd9ff8-532c-4537-bd17-ac9525578d46" (UID: "12cd9ff8-532c-4537-bd17-ac9525578d46"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.457834 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.457861 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.457871 4783 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.465692 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-config" (OuterVolumeSpecName: "config") pod "12cd9ff8-532c-4537-bd17-ac9525578d46" (UID: "12cd9ff8-532c-4537-bd17-ac9525578d46"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.510906 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-dp2b8" event={"ID":"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672","Type":"ContainerStarted","Data":"02ab9308f33292014be67b055763857cf95d31e2dee0e974c113876e2ed02c9f"} Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.512204 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-b48f7" event={"ID":"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d","Type":"ContainerStarted","Data":"ca7a540125edb5ea8f93e4392a57252e540d0f904cfd8a485cd8c030f537dd2b"} Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.512255 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-b48f7" event={"ID":"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d","Type":"ContainerStarted","Data":"ef2a2703da3a2b2abc1f75f29a7bc73438f925d7cd20e9b0342815c82f09c0f2"} Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.514764 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qcbqm" event={"ID":"04087483-d2dd-4f70-99f1-592a46394263","Type":"ContainerStarted","Data":"a34228bb0d29433f66a191cca2c583f5b0ad262e498c53266566416b15fddd64"} Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.517014 4783 generic.go:334] "Generic (PLEG): container finished" podID="12cd9ff8-532c-4537-bd17-ac9525578d46" containerID="d6429eb0c00b98546e48a36a7836dc511a86fdc3e9786896f8cd134aebd1eb90" exitCode=0 Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.517075 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" event={"ID":"12cd9ff8-532c-4537-bd17-ac9525578d46","Type":"ContainerDied","Data":"d6429eb0c00b98546e48a36a7836dc511a86fdc3e9786896f8cd134aebd1eb90"} Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.517102 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" event={"ID":"12cd9ff8-532c-4537-bd17-ac9525578d46","Type":"ContainerDied","Data":"64b9568739bb968d48d3200b4bee670deda1d53283f4d62a75a37a56fce89e81"} Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.517131 4783 scope.go:117] "RemoveContainer" containerID="d6429eb0c00b98546e48a36a7836dc511a86fdc3e9786896f8cd134aebd1eb90" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.517286 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b4dc449d9-9rtzr" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.527846 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wvfc5" event={"ID":"99edfb42-ed13-471e-8e93-62ccafc5b190","Type":"ContainerStarted","Data":"237eb38fbdc94904d60796a2236ae34e060003dfa7daad4c18b8375762d75436"} Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.547591 4783 scope.go:117] "RemoveContainer" containerID="eff68e2a5bb74a237438e3f6b6df0840c3948f12c8278800279202a74df218e0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.557336 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-b48f7" podStartSLOduration=2.557318272 podStartE2EDuration="2.557318272s" podCreationTimestamp="2025-09-30 13:53:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:53:58.527493027 +0000 UTC m=+1138.458959344" watchObservedRunningTime="2025-09-30 13:53:58.557318272 +0000 UTC m=+1138.488784579" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.574040 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12cd9ff8-532c-4537-bd17-ac9525578d46-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.585329 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-wvfc5" podStartSLOduration=6.585303719 podStartE2EDuration="6.585303719s" podCreationTimestamp="2025-09-30 13:53:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:53:58.543751978 +0000 UTC m=+1138.475218295" watchObservedRunningTime="2025-09-30 13:53:58.585303719 +0000 UTC m=+1138.516770036" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.595320 4783 scope.go:117] "RemoveContainer" containerID="d6429eb0c00b98546e48a36a7836dc511a86fdc3e9786896f8cd134aebd1eb90" Sep 30 13:53:58 crc kubenswrapper[4783]: E0930 13:53:58.595851 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6429eb0c00b98546e48a36a7836dc511a86fdc3e9786896f8cd134aebd1eb90\": container with ID starting with d6429eb0c00b98546e48a36a7836dc511a86fdc3e9786896f8cd134aebd1eb90 not found: ID does not exist" containerID="d6429eb0c00b98546e48a36a7836dc511a86fdc3e9786896f8cd134aebd1eb90" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.595922 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6429eb0c00b98546e48a36a7836dc511a86fdc3e9786896f8cd134aebd1eb90"} err="failed to get container status \"d6429eb0c00b98546e48a36a7836dc511a86fdc3e9786896f8cd134aebd1eb90\": rpc error: code = NotFound desc = could not find container \"d6429eb0c00b98546e48a36a7836dc511a86fdc3e9786896f8cd134aebd1eb90\": container with ID starting with d6429eb0c00b98546e48a36a7836dc511a86fdc3e9786896f8cd134aebd1eb90 not found: ID does not exist" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.595956 4783 scope.go:117] "RemoveContainer" containerID="eff68e2a5bb74a237438e3f6b6df0840c3948f12c8278800279202a74df218e0" Sep 30 13:53:58 crc kubenswrapper[4783]: E0930 13:53:58.612061 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"eff68e2a5bb74a237438e3f6b6df0840c3948f12c8278800279202a74df218e0\": container with ID starting with eff68e2a5bb74a237438e3f6b6df0840c3948f12c8278800279202a74df218e0 not found: ID does not exist" containerID="eff68e2a5bb74a237438e3f6b6df0840c3948f12c8278800279202a74df218e0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.612118 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eff68e2a5bb74a237438e3f6b6df0840c3948f12c8278800279202a74df218e0"} err="failed to get container status \"eff68e2a5bb74a237438e3f6b6df0840c3948f12c8278800279202a74df218e0\": rpc error: code = NotFound desc = could not find container \"eff68e2a5bb74a237438e3f6b6df0840c3948f12c8278800279202a74df218e0\": container with ID starting with eff68e2a5bb74a237438e3f6b6df0840c3948f12c8278800279202a74df218e0 not found: ID does not exist" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.620940 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b4dc449d9-9rtzr"] Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.628846 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b4dc449d9-9rtzr"] Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.653596 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5674f66f87-q5c7d"] Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.700007 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:53:58 crc kubenswrapper[4783]: E0930 13:53:58.700474 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12cd9ff8-532c-4537-bd17-ac9525578d46" containerName="init" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.700494 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="12cd9ff8-532c-4537-bd17-ac9525578d46" containerName="init" Sep 30 13:53:58 crc kubenswrapper[4783]: E0930 13:53:58.700517 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12cd9ff8-532c-4537-bd17-ac9525578d46" containerName="dnsmasq-dns" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.700525 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="12cd9ff8-532c-4537-bd17-ac9525578d46" containerName="dnsmasq-dns" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.700754 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="12cd9ff8-532c-4537-bd17-ac9525578d46" containerName="dnsmasq-dns" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.702274 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.705375 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.705645 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.705781 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-r4nf8" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.724675 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.751924 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.753252 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.757942 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.814293 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.867465 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12cd9ff8-532c-4537-bd17-ac9525578d46" path="/var/lib/kubelet/pods/12cd9ff8-532c-4537-bd17-ac9525578d46/volumes" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.881055 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.881105 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.881130 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.881156 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-config-data\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.881189 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-config-data\") pod 
\"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.881208 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4035860-f960-4405-b1b7-33a77b3cb4e5-logs\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.881427 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-scripts\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.881510 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.881539 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-logs\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.881570 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgrxb\" (UniqueName: \"kubernetes.io/projected/d4035860-f960-4405-b1b7-33a77b3cb4e5-kube-api-access-qgrxb\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.881627 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d4035860-f960-4405-b1b7-33a77b3cb4e5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.881722 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.881770 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.881852 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwbsc\" 
(UniqueName: \"kubernetes.io/projected/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-kube-api-access-cwbsc\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.982959 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.983017 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-logs\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.983048 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgrxb\" (UniqueName: \"kubernetes.io/projected/d4035860-f960-4405-b1b7-33a77b3cb4e5-kube-api-access-qgrxb\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.983084 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d4035860-f960-4405-b1b7-33a77b3cb4e5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.983111 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.983131 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.983176 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwbsc\" (UniqueName: \"kubernetes.io/projected/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-kube-api-access-cwbsc\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.983197 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.983242 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.983268 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.983302 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-config-data\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.983350 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.983376 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4035860-f960-4405-b1b7-33a77b3cb4e5-logs\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.983461 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-scripts\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.983620 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d4035860-f960-4405-b1b7-33a77b3cb4e5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.984563 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4035860-f960-4405-b1b7-33a77b3cb4e5-logs\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.984951 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.985156 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") device mount path 
\"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.985442 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.985514 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-logs\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.988586 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.989396 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-config-data\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.992914 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-scripts\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.995265 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.995973 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:58 crc kubenswrapper[4783]: I0930 13:53:58.998703 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:59 crc kubenswrapper[4783]: I0930 13:53:59.001849 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwbsc\" (UniqueName: \"kubernetes.io/projected/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-kube-api-access-cwbsc\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:59 crc kubenswrapper[4783]: I0930 13:53:59.006702 4783 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgrxb\" (UniqueName: \"kubernetes.io/projected/d4035860-f960-4405-b1b7-33a77b3cb4e5-kube-api-access-qgrxb\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:59 crc kubenswrapper[4783]: I0930 13:53:59.021097 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " pod="openstack/glance-default-external-api-0" Sep 30 13:53:59 crc kubenswrapper[4783]: I0930 13:53:59.026164 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:53:59 crc kubenswrapper[4783]: I0930 13:53:59.041305 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 13:53:59 crc kubenswrapper[4783]: I0930 13:53:59.094412 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 13:53:59 crc kubenswrapper[4783]: I0930 13:53:59.584532 4783 generic.go:334] "Generic (PLEG): container finished" podID="435b0af3-e4bc-417f-a517-03c12786cee4" containerID="4e4c72cb57a3437b4f2c65d5e2a4bdc02b29584b0bd555a7d8a99d90c5971373" exitCode=0 Sep 30 13:53:59 crc kubenswrapper[4783]: I0930 13:53:59.585034 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" event={"ID":"435b0af3-e4bc-417f-a517-03c12786cee4","Type":"ContainerDied","Data":"4e4c72cb57a3437b4f2c65d5e2a4bdc02b29584b0bd555a7d8a99d90c5971373"} Sep 30 13:53:59 crc kubenswrapper[4783]: I0930 13:53:59.585060 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" event={"ID":"435b0af3-e4bc-417f-a517-03c12786cee4","Type":"ContainerStarted","Data":"cc595e34910e5207b50fa76818c55f6300cb6a031a23f4bf072d265db73b5bb7"} Sep 30 13:53:59 crc kubenswrapper[4783]: W0930 13:53:59.812490 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4035860_f960_4405_b1b7_33a77b3cb4e5.slice/crio-0e0c0ee9ecfbbdacaead2c2e26235237dcb3f9ccfd4c6f9631c4e324934329ce WatchSource:0}: Error finding container 0e0c0ee9ecfbbdacaead2c2e26235237dcb3f9ccfd4c6f9631c4e324934329ce: Status 404 returned error can't find the container with id 0e0c0ee9ecfbbdacaead2c2e26235237dcb3f9ccfd4c6f9631c4e324934329ce Sep 30 13:53:59 crc kubenswrapper[4783]: I0930 13:53:59.814808 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:54:00 crc kubenswrapper[4783]: I0930 13:54:00.501630 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:54:00 crc kubenswrapper[4783]: I0930 13:54:00.546198 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 13:54:00 crc kubenswrapper[4783]: W0930 13:54:00.566610 4783 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ad95b8f_b4a8_44b4_8d2f_1b234e916d55.slice/crio-0f56ac41f979ebcbdde5651c3fa8c70350e6f59be50319503b6a16bb051bd550 WatchSource:0}: Error finding container 0f56ac41f979ebcbdde5651c3fa8c70350e6f59be50319503b6a16bb051bd550: Status 404 returned error can't find the container with id 0f56ac41f979ebcbdde5651c3fa8c70350e6f59be50319503b6a16bb051bd550 Sep 30 13:54:00 crc kubenswrapper[4783]: I0930 13:54:00.577598 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 13:54:00 crc kubenswrapper[4783]: I0930 13:54:00.620759 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d4035860-f960-4405-b1b7-33a77b3cb4e5","Type":"ContainerStarted","Data":"04e369386fa3cd68160f926ead01508468ec157e043755af4551cf1f7224bf89"} Sep 30 13:54:00 crc kubenswrapper[4783]: I0930 13:54:00.620819 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d4035860-f960-4405-b1b7-33a77b3cb4e5","Type":"ContainerStarted","Data":"0e0c0ee9ecfbbdacaead2c2e26235237dcb3f9ccfd4c6f9631c4e324934329ce"} Sep 30 13:54:00 crc kubenswrapper[4783]: I0930 13:54:00.633337 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55","Type":"ContainerStarted","Data":"0f56ac41f979ebcbdde5651c3fa8c70350e6f59be50319503b6a16bb051bd550"} Sep 30 13:54:00 crc kubenswrapper[4783]: I0930 13:54:00.638840 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" event={"ID":"435b0af3-e4bc-417f-a517-03c12786cee4","Type":"ContainerStarted","Data":"7529745251e90cc3938c592e0145e0f316a4a387a8df7fcf029dedb6c177263b"} Sep 30 13:54:00 crc kubenswrapper[4783]: I0930 13:54:00.638997 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:54:00 crc kubenswrapper[4783]: I0930 13:54:00.663747 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" podStartSLOduration=3.663730432 podStartE2EDuration="3.663730432s" podCreationTimestamp="2025-09-30 13:53:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:54:00.658441873 +0000 UTC m=+1140.589908180" watchObservedRunningTime="2025-09-30 13:54:00.663730432 +0000 UTC m=+1140.595196739" Sep 30 13:54:01 crc kubenswrapper[4783]: I0930 13:54:01.652285 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d4035860-f960-4405-b1b7-33a77b3cb4e5","Type":"ContainerStarted","Data":"77c0e75f7160556224744bbf5e36c4c07dcab123b479921029a60183c6eeabec"} Sep 30 13:54:01 crc kubenswrapper[4783]: I0930 13:54:01.652595 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d4035860-f960-4405-b1b7-33a77b3cb4e5" containerName="glance-log" containerID="cri-o://04e369386fa3cd68160f926ead01508468ec157e043755af4551cf1f7224bf89" gracePeriod=30 Sep 30 13:54:01 crc kubenswrapper[4783]: I0930 13:54:01.652945 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d4035860-f960-4405-b1b7-33a77b3cb4e5" containerName="glance-httpd" 
containerID="cri-o://77c0e75f7160556224744bbf5e36c4c07dcab123b479921029a60183c6eeabec" gracePeriod=30 Sep 30 13:54:01 crc kubenswrapper[4783]: I0930 13:54:01.655372 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55","Type":"ContainerStarted","Data":"3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46"} Sep 30 13:54:02 crc kubenswrapper[4783]: I0930 13:54:02.667363 4783 generic.go:334] "Generic (PLEG): container finished" podID="d4035860-f960-4405-b1b7-33a77b3cb4e5" containerID="77c0e75f7160556224744bbf5e36c4c07dcab123b479921029a60183c6eeabec" exitCode=143 Sep 30 13:54:02 crc kubenswrapper[4783]: I0930 13:54:02.667838 4783 generic.go:334] "Generic (PLEG): container finished" podID="d4035860-f960-4405-b1b7-33a77b3cb4e5" containerID="04e369386fa3cd68160f926ead01508468ec157e043755af4551cf1f7224bf89" exitCode=143 Sep 30 13:54:02 crc kubenswrapper[4783]: I0930 13:54:02.667488 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d4035860-f960-4405-b1b7-33a77b3cb4e5","Type":"ContainerDied","Data":"77c0e75f7160556224744bbf5e36c4c07dcab123b479921029a60183c6eeabec"} Sep 30 13:54:02 crc kubenswrapper[4783]: I0930 13:54:02.667888 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d4035860-f960-4405-b1b7-33a77b3cb4e5","Type":"ContainerDied","Data":"04e369386fa3cd68160f926ead01508468ec157e043755af4551cf1f7224bf89"} Sep 30 13:54:02 crc kubenswrapper[4783]: I0930 13:54:02.669665 4783 generic.go:334] "Generic (PLEG): container finished" podID="7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d" containerID="ca7a540125edb5ea8f93e4392a57252e540d0f904cfd8a485cd8c030f537dd2b" exitCode=0 Sep 30 13:54:02 crc kubenswrapper[4783]: I0930 13:54:02.669695 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-b48f7" event={"ID":"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d","Type":"ContainerDied","Data":"ca7a540125edb5ea8f93e4392a57252e540d0f904cfd8a485cd8c030f537dd2b"} Sep 30 13:54:02 crc kubenswrapper[4783]: I0930 13:54:02.694547 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.694525311 podStartE2EDuration="5.694525311s" podCreationTimestamp="2025-09-30 13:53:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:54:01.68657333 +0000 UTC m=+1141.618039647" watchObservedRunningTime="2025-09-30 13:54:02.694525311 +0000 UTC m=+1142.625991628" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.262014 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.413785 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"d4035860-f960-4405-b1b7-33a77b3cb4e5\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.413853 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d4035860-f960-4405-b1b7-33a77b3cb4e5-httpd-run\") pod \"d4035860-f960-4405-b1b7-33a77b3cb4e5\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.413892 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgrxb\" (UniqueName: \"kubernetes.io/projected/d4035860-f960-4405-b1b7-33a77b3cb4e5-kube-api-access-qgrxb\") pod \"d4035860-f960-4405-b1b7-33a77b3cb4e5\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.413944 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4035860-f960-4405-b1b7-33a77b3cb4e5-logs\") pod \"d4035860-f960-4405-b1b7-33a77b3cb4e5\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.413999 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-scripts\") pod \"d4035860-f960-4405-b1b7-33a77b3cb4e5\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.414051 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-config-data\") pod \"d4035860-f960-4405-b1b7-33a77b3cb4e5\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.414090 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-combined-ca-bundle\") pod \"d4035860-f960-4405-b1b7-33a77b3cb4e5\" (UID: \"d4035860-f960-4405-b1b7-33a77b3cb4e5\") " Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.416115 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4035860-f960-4405-b1b7-33a77b3cb4e5-logs" (OuterVolumeSpecName: "logs") pod "d4035860-f960-4405-b1b7-33a77b3cb4e5" (UID: "d4035860-f960-4405-b1b7-33a77b3cb4e5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.416577 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4035860-f960-4405-b1b7-33a77b3cb4e5-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d4035860-f960-4405-b1b7-33a77b3cb4e5" (UID: "d4035860-f960-4405-b1b7-33a77b3cb4e5"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.420729 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "d4035860-f960-4405-b1b7-33a77b3cb4e5" (UID: "d4035860-f960-4405-b1b7-33a77b3cb4e5"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.421056 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4035860-f960-4405-b1b7-33a77b3cb4e5-kube-api-access-qgrxb" (OuterVolumeSpecName: "kube-api-access-qgrxb") pod "d4035860-f960-4405-b1b7-33a77b3cb4e5" (UID: "d4035860-f960-4405-b1b7-33a77b3cb4e5"). InnerVolumeSpecName "kube-api-access-qgrxb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.434340 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-scripts" (OuterVolumeSpecName: "scripts") pod "d4035860-f960-4405-b1b7-33a77b3cb4e5" (UID: "d4035860-f960-4405-b1b7-33a77b3cb4e5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.441932 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4035860-f960-4405-b1b7-33a77b3cb4e5" (UID: "d4035860-f960-4405-b1b7-33a77b3cb4e5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.463704 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-config-data" (OuterVolumeSpecName: "config-data") pod "d4035860-f960-4405-b1b7-33a77b3cb4e5" (UID: "d4035860-f960-4405-b1b7-33a77b3cb4e5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.515925 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.515954 4783 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d4035860-f960-4405-b1b7-33a77b3cb4e5-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.515977 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgrxb\" (UniqueName: \"kubernetes.io/projected/d4035860-f960-4405-b1b7-33a77b3cb4e5-kube-api-access-qgrxb\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.515991 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4035860-f960-4405-b1b7-33a77b3cb4e5-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.516001 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.516011 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.516024 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4035860-f960-4405-b1b7-33a77b3cb4e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.535797 4783 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.617236 4783 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.693675 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d4035860-f960-4405-b1b7-33a77b3cb4e5","Type":"ContainerDied","Data":"0e0c0ee9ecfbbdacaead2c2e26235237dcb3f9ccfd4c6f9631c4e324934329ce"} Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.693740 4783 scope.go:117] "RemoveContainer" containerID="77c0e75f7160556224744bbf5e36c4c07dcab123b479921029a60183c6eeabec" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.693700 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.713632 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55","Type":"ContainerStarted","Data":"789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae"} Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.713965 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" containerName="glance-log" containerID="cri-o://3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46" gracePeriod=30 Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.713979 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" containerName="glance-httpd" containerID="cri-o://789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae" gracePeriod=30 Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.720381 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-th9hn" event={"ID":"f7b8982d-c79a-470e-a0b5-1a8c2e299993","Type":"ContainerDied","Data":"bfdcaaa26c0aafae7f67df9116fac849120c495a3024bcb87d2ec57cb5988177"} Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.720653 4783 generic.go:334] "Generic (PLEG): container finished" podID="f7b8982d-c79a-470e-a0b5-1a8c2e299993" containerID="bfdcaaa26c0aafae7f67df9116fac849120c495a3024bcb87d2ec57cb5988177" exitCode=0 Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.727852 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.745943 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.755438 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:54:04 crc kubenswrapper[4783]: E0930 13:54:04.755846 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4035860-f960-4405-b1b7-33a77b3cb4e5" containerName="glance-log" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.755867 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4035860-f960-4405-b1b7-33a77b3cb4e5" containerName="glance-log" Sep 30 13:54:04 crc kubenswrapper[4783]: E0930 13:54:04.755885 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4035860-f960-4405-b1b7-33a77b3cb4e5" containerName="glance-httpd" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.755891 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4035860-f960-4405-b1b7-33a77b3cb4e5" containerName="glance-httpd" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.756046 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4035860-f960-4405-b1b7-33a77b3cb4e5" containerName="glance-log" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.756066 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4035860-f960-4405-b1b7-33a77b3cb4e5" containerName="glance-httpd" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.757092 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.765001 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.765038 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.765058 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.764850826 podStartE2EDuration="7.764850826s" podCreationTimestamp="2025-09-30 13:53:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:54:04.746815307 +0000 UTC m=+1144.678281634" watchObservedRunningTime="2025-09-30 13:54:04.764850826 +0000 UTC m=+1144.696317133" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.773055 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.821218 4783 scope.go:117] "RemoveContainer" containerID="04e369386fa3cd68160f926ead01508468ec157e043755af4551cf1f7224bf89" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.854073 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4035860-f960-4405-b1b7-33a77b3cb4e5" path="/var/lib/kubelet/pods/d4035860-f960-4405-b1b7-33a77b3cb4e5/volumes" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.889282 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.933112 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.933582 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be403e4a-ed71-41b9-9c17-16a913ecbd8e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.933651 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-scripts\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.933746 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.933781 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-x26f5\" (UniqueName: \"kubernetes.io/projected/be403e4a-ed71-41b9-9c17-16a913ecbd8e-kube-api-access-x26f5\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.933830 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.933857 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be403e4a-ed71-41b9-9c17-16a913ecbd8e-logs\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:04 crc kubenswrapper[4783]: I0930 13:54:04.933936 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-config-data\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.035292 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-combined-ca-bundle\") pod \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.035325 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-scripts\") pod \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.035358 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfwlp\" (UniqueName: \"kubernetes.io/projected/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-kube-api-access-zfwlp\") pod \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.035382 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-config-data\") pod \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.035446 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-fernet-keys\") pod \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\" (UID: \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.035503 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-credential-keys\") pod \"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\" (UID: 
\"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d\") " Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.035707 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-scripts\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.035753 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.035778 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x26f5\" (UniqueName: \"kubernetes.io/projected/be403e4a-ed71-41b9-9c17-16a913ecbd8e-kube-api-access-x26f5\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.035799 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.035825 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be403e4a-ed71-41b9-9c17-16a913ecbd8e-logs\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.035853 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-config-data\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.035878 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.035938 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be403e4a-ed71-41b9-9c17-16a913ecbd8e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.036785 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be403e4a-ed71-41b9-9c17-16a913ecbd8e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.038783 4783 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be403e4a-ed71-41b9-9c17-16a913ecbd8e-logs\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.039322 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.041819 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d" (UID: "7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.043788 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.054737 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d" (UID: "7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.054833 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-scripts" (OuterVolumeSpecName: "scripts") pod "7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d" (UID: "7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.054840 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-kube-api-access-zfwlp" (OuterVolumeSpecName: "kube-api-access-zfwlp") pod "7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d" (UID: "7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d"). InnerVolumeSpecName "kube-api-access-zfwlp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.055593 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-scripts\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.057339 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.057703 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-config-data\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.059824 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x26f5\" (UniqueName: \"kubernetes.io/projected/be403e4a-ed71-41b9-9c17-16a913ecbd8e-kube-api-access-x26f5\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.081138 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.083149 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-config-data" (OuterVolumeSpecName: "config-data") pod "7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d" (UID: "7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.097204 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d" (UID: "7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.139862 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.139887 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.139896 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfwlp\" (UniqueName: \"kubernetes.io/projected/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-kube-api-access-zfwlp\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.139907 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.139915 4783 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.139923 4783 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.188846 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.421035 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.547572 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-logs\") pod \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.547810 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.547910 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-scripts\") pod \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.547930 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-combined-ca-bundle\") pod \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.547982 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-config-data\") pod \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.548018 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwbsc\" (UniqueName: \"kubernetes.io/projected/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-kube-api-access-cwbsc\") pod \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.548299 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-httpd-run\") pod \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\" (UID: \"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55\") " Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.548834 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" (UID: "2ad95b8f-b4a8-44b4-8d2f-1b234e916d55"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.549488 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-logs" (OuterVolumeSpecName: "logs") pod "2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" (UID: "2ad95b8f-b4a8-44b4-8d2f-1b234e916d55"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.553424 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-scripts" (OuterVolumeSpecName: "scripts") pod "2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" (UID: "2ad95b8f-b4a8-44b4-8d2f-1b234e916d55"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.553994 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" (UID: "2ad95b8f-b4a8-44b4-8d2f-1b234e916d55"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.554415 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-kube-api-access-cwbsc" (OuterVolumeSpecName: "kube-api-access-cwbsc") pod "2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" (UID: "2ad95b8f-b4a8-44b4-8d2f-1b234e916d55"). InnerVolumeSpecName "kube-api-access-cwbsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.572484 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" (UID: "2ad95b8f-b4a8-44b4-8d2f-1b234e916d55"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.590686 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-config-data" (OuterVolumeSpecName: "config-data") pod "2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" (UID: "2ad95b8f-b4a8-44b4-8d2f-1b234e916d55"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.650412 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.650435 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.650446 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.650455 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwbsc\" (UniqueName: \"kubernetes.io/projected/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-kube-api-access-cwbsc\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.650464 4783 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.650472 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.650501 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.669342 4783 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.740721 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.744310 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-b48f7" event={"ID":"7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d","Type":"ContainerDied","Data":"ef2a2703da3a2b2abc1f75f29a7bc73438f925d7cd20e9b0342815c82f09c0f2"} Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.744363 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef2a2703da3a2b2abc1f75f29a7bc73438f925d7cd20e9b0342815c82f09c0f2" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.744423 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-b48f7" Sep 30 13:54:05 crc kubenswrapper[4783]: W0930 13:54:05.748007 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe403e4a_ed71_41b9_9c17_16a913ecbd8e.slice/crio-64d2adbb39373353d997c2e672da57610cc574d53e32ace7f919b9db90b97079 WatchSource:0}: Error finding container 64d2adbb39373353d997c2e672da57610cc574d53e32ace7f919b9db90b97079: Status 404 returned error can't find the container with id 64d2adbb39373353d997c2e672da57610cc574d53e32ace7f919b9db90b97079 Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.751135 4783 generic.go:334] "Generic (PLEG): container finished" podID="2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" containerID="789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae" exitCode=143 Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.751161 4783 generic.go:334] "Generic (PLEG): container finished" podID="2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" containerID="3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46" exitCode=143 Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.751264 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.751345 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55","Type":"ContainerDied","Data":"789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae"} Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.751405 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55","Type":"ContainerDied","Data":"3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46"} Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.751448 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2ad95b8f-b4a8-44b4-8d2f-1b234e916d55","Type":"ContainerDied","Data":"0f56ac41f979ebcbdde5651c3fa8c70350e6f59be50319503b6a16bb051bd550"} Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.751470 4783 scope.go:117] "RemoveContainer" containerID="789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.751584 4783 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.760695 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8257f606-4b1c-46e2-918e-9ebf1128f6cc","Type":"ContainerStarted","Data":"a292c102416e551d8b6ab46833ffcf17c8dc0518b4802aea9fda0c0279f9dbfa"} Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.768093 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-dp2b8" event={"ID":"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672","Type":"ContainerStarted","Data":"850d2970bc8214d5e1d97a9ad7c93fc9c84be59211bd701c81e64b2ee2ab6b0e"} Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.808913 4783 scope.go:117] "RemoveContainer" containerID="3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46" Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.824975 4783 
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.846394 4783 scope.go:117] "RemoveContainer" containerID="789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae"
Sep 30 13:54:05 crc kubenswrapper[4783]: E0930 13:54:05.850511 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae\": container with ID starting with 789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae not found: ID does not exist" containerID="789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.850563 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae"} err="failed to get container status \"789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae\": rpc error: code = NotFound desc = could not find container \"789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae\": container with ID starting with 789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae not found: ID does not exist"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.850587 4783 scope.go:117] "RemoveContainer" containerID="3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46"
Sep 30 13:54:05 crc kubenswrapper[4783]: E0930 13:54:05.859031 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46\": container with ID starting with 3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46 not found: ID does not exist" containerID="3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.859078 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46"} err="failed to get container status \"3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46\": rpc error: code = NotFound desc = could not find container \"3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46\": container with ID starting with 3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46 not found: ID does not exist"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.859102 4783 scope.go:117] "RemoveContainer" containerID="789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.859618 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae"} err="failed to get container status \"789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae\": rpc error: code = NotFound desc = could not find container \"789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae\": container with ID starting with 789e5f7d50e6e3e2e82c488cb06b2c3a761ec28b53b67ad52f1d23b2abecd2ae not found: ID does not exist"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.859639 4783 scope.go:117] "RemoveContainer" containerID="3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.863480 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46"} err="failed to get container status \"3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46\": rpc error: code = NotFound desc = could not find container \"3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46\": container with ID starting with 3f60c916639d21cd6c7ceb9bfbf8bac3e4a86432c60e3454a631c34c77b6fa46 not found: ID does not exist"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.865292 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.879240 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.890717 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 13:54:05 crc kubenswrapper[4783]: E0930 13:54:05.891078 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" containerName="glance-log"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.891093 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" containerName="glance-log"
Sep 30 13:54:05 crc kubenswrapper[4783]: E0930 13:54:05.891104 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d" containerName="keystone-bootstrap"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.891111 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d" containerName="keystone-bootstrap"
Sep 30 13:54:05 crc kubenswrapper[4783]: E0930 13:54:05.891123 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" containerName="glance-httpd"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.891129 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" containerName="glance-httpd"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.891307 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" containerName="glance-log"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.891322 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d" containerName="keystone-bootstrap"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.891334 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" containerName="glance-httpd"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.892379 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
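Note: the repeated RemoveContainer / "ContainerStatus from runtime service failed" / "DeleteContainer returned error" sequence above is benign: the containers are already gone, so the NotFound from CRI-O simply confirms the desired end state. A sketch of that idempotent-delete pattern, using a hypothetical runtime interface rather than the real CRI client:

    package main

    import (
        "errors"
        "fmt"
    )

    var errNotFound = errors.New("rpc error: code = NotFound")

    // runtime stands in for a container runtime client; only the shape matters here.
    type runtime interface {
        RemoveContainer(id string) error
    }

    // gone simulates a runtime where the container no longer exists.
    type gone struct{}

    func (gone) RemoveContainer(id string) error { return errNotFound }

    // removeContainer treats NotFound as success: the container is already absent.
    func removeContainer(r runtime, id string) error {
        if err := r.RemoveContainer(id); err != nil {
            if errors.Is(err, errNotFound) {
                fmt.Println("container already gone:", id)
                return nil
            }
            return err
        }
        return nil
    }

    func main() {
        _ = removeContainer(gone{}, "789e5f7d...")
    }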
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.894792 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.896832 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Sep 30 13:54:05 crc kubenswrapper[4783]: I0930 13:54:05.916697 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.018904 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-84fcfd7bf5-qmzxl"]
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.021602 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.024291 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.024342 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.024705 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.025244 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-grwfp"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.025296 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.030135 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-84fcfd7bf5-qmzxl"]
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.033572 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.062347 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.062429 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.062477 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94xj6\" (UniqueName: \"kubernetes.io/projected/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-kube-api-access-94xj6\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.062530 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.062565 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.062590 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-logs\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.062614 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.062632 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.117869 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-th9hn"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164133 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164181 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-scripts\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164245 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-credential-keys\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164269 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94xj6\" (UniqueName: \"kubernetes.io/projected/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-kube-api-access-94xj6\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164297 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-config-data\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164322 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164367 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-internal-tls-certs\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164389 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164415 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-fernet-keys\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164441 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-logs\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164475 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164496 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164524 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-public-tls-certs\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164567 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-combined-ca-bundle\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164591 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frktz\" (UniqueName: \"kubernetes.io/projected/fdd4645f-8430-40ad-9539-663a01c74c13-kube-api-access-frktz\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.164618 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.166430 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-logs\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.166781 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.168441 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.171068 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.171346 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.179680 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.187915 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94xj6\" (UniqueName: \"kubernetes.io/projected/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-kube-api-access-94xj6\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.188833 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.202361 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.215581 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.265946 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-scripts\") pod \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") "
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.265991 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-combined-ca-bundle\") pod \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") "
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.266302 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvwqk\" (UniqueName: \"kubernetes.io/projected/f7b8982d-c79a-470e-a0b5-1a8c2e299993-kube-api-access-zvwqk\") pod \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") "
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.266330 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-config-data\") pod \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") "
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.266369 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7b8982d-c79a-470e-a0b5-1a8c2e299993-logs\") pod \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\" (UID: \"f7b8982d-c79a-470e-a0b5-1a8c2e299993\") "
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.266705 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-internal-tls-certs\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.266733 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-fernet-keys\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.266774 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-public-tls-certs\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.266813 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-combined-ca-bundle\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.266832 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frktz\" (UniqueName: \"kubernetes.io/projected/fdd4645f-8430-40ad-9539-663a01c74c13-kube-api-access-frktz\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.266872 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-scripts\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.266909 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-credential-keys\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.266930 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-config-data\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.268623 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7b8982d-c79a-470e-a0b5-1a8c2e299993-logs" (OuterVolumeSpecName: "logs") pod "f7b8982d-c79a-470e-a0b5-1a8c2e299993" (UID: "f7b8982d-c79a-470e-a0b5-1a8c2e299993"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.270583 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7b8982d-c79a-470e-a0b5-1a8c2e299993-kube-api-access-zvwqk" (OuterVolumeSpecName: "kube-api-access-zvwqk") pod "f7b8982d-c79a-470e-a0b5-1a8c2e299993" (UID: "f7b8982d-c79a-470e-a0b5-1a8c2e299993"). InnerVolumeSpecName "kube-api-access-zvwqk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.270910 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-scripts" (OuterVolumeSpecName: "scripts") pod "f7b8982d-c79a-470e-a0b5-1a8c2e299993" (UID: "f7b8982d-c79a-470e-a0b5-1a8c2e299993"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.273034 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-internal-tls-certs\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.274517 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-config-data\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.274704 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-combined-ca-bundle\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.277168 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-fernet-keys\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.278019 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-public-tls-certs\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.279471 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-scripts\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.280067 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-credential-keys\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.288591 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frktz\" (UniqueName: \"kubernetes.io/projected/fdd4645f-8430-40ad-9539-663a01c74c13-kube-api-access-frktz\") pod \"keystone-84fcfd7bf5-qmzxl\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " pod="openstack/keystone-84fcfd7bf5-qmzxl"
Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.298105 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-config-data" (OuterVolumeSpecName: "config-data") pod "f7b8982d-c79a-470e-a0b5-1a8c2e299993" (UID: "f7b8982d-c79a-470e-a0b5-1a8c2e299993"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.298903 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7b8982d-c79a-470e-a0b5-1a8c2e299993" (UID: "f7b8982d-c79a-470e-a0b5-1a8c2e299993"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.349763 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-84fcfd7bf5-qmzxl" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.370655 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvwqk\" (UniqueName: \"kubernetes.io/projected/f7b8982d-c79a-470e-a0b5-1a8c2e299993-kube-api-access-zvwqk\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.371208 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.371302 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7b8982d-c79a-470e-a0b5-1a8c2e299993-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.371433 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.371508 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7b8982d-c79a-470e-a0b5-1a8c2e299993-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:06 crc kubenswrapper[4783]: W0930 13:54:06.779125 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b7447c7_b05c_4a35_99a8_212d2fccfdbb.slice/crio-3fd1766ad78e24aa62e2b7df630fccd1e7b5440a6d739f5e33121a20d551fc99 WatchSource:0}: Error finding container 3fd1766ad78e24aa62e2b7df630fccd1e7b5440a6d739f5e33121a20d551fc99: Status 404 returned error can't find the container with id 3fd1766ad78e24aa62e2b7df630fccd1e7b5440a6d739f5e33121a20d551fc99 Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.790144 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.791532 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be403e4a-ed71-41b9-9c17-16a913ecbd8e","Type":"ContainerStarted","Data":"64d2adbb39373353d997c2e672da57610cc574d53e32ace7f919b9db90b97079"} Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.800300 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-th9hn" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.800355 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-th9hn" event={"ID":"f7b8982d-c79a-470e-a0b5-1a8c2e299993","Type":"ContainerDied","Data":"f19167a4e80317d2c4ecfd6e41a50d3e1ed7387f52e961a983a22c2623a73b5a"} Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.800389 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f19167a4e80317d2c4ecfd6e41a50d3e1ed7387f52e961a983a22c2623a73b5a" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.833349 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-84fcfd7bf5-qmzxl"] Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.859625 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ad95b8f-b4a8-44b4-8d2f-1b234e916d55" path="/var/lib/kubelet/pods/2ad95b8f-b4a8-44b4-8d2f-1b234e916d55/volumes" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.933361 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-784897656b-2kp66"] Sep 30 13:54:06 crc kubenswrapper[4783]: E0930 13:54:06.934584 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7b8982d-c79a-470e-a0b5-1a8c2e299993" containerName="placement-db-sync" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.934610 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7b8982d-c79a-470e-a0b5-1a8c2e299993" containerName="placement-db-sync" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.934873 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7b8982d-c79a-470e-a0b5-1a8c2e299993" containerName="placement-db-sync" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.936556 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-784897656b-2kp66" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.939511 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.939530 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.939784 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-2tgtm" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.939941 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.942438 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.960557 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-784897656b-2kp66"] Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.982632 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-scripts\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.982694 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-combined-ca-bundle\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.982730 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-config-data\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.982790 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncnbq\" (UniqueName: \"kubernetes.io/projected/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-kube-api-access-ncnbq\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.982816 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-logs\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.982856 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-public-tls-certs\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66" Sep 30 13:54:06 crc kubenswrapper[4783]: I0930 13:54:06.982885 4783 
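Note: the reflector.go "Caches populated for *v1.Secret" lines above are client-go reflectors finishing their initial LIST/WATCH for each secret the new pod references. A minimal client-go sketch of the same populate-then-wait step; the kubeconfig path is an assumption for illustration, and real kubelet wiring differs:

    package main

    import (
        "fmt"
        "time"

        "k8s.io/client-go/informers"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/cache"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Assumes a reachable kubeconfig at this (hypothetical) path.
        config, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
        if err != nil {
            panic(err)
        }
        client := kubernetes.NewForConfigOrDie(config)

        stop := make(chan struct{})
        defer close(stop)

        factory := informers.NewSharedInformerFactory(client, 10*time.Minute)
        secrets := factory.Core().V1().Secrets().Informer()
        factory.Start(stop)

        // Block until the initial list has landed in the local cache.
        if !cache.WaitForCacheSync(stop, secrets.HasSynced) {
            panic("cache never synced")
        }
        fmt.Println("Caches populated for *v1.Secret")
    }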
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.084450 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-logs\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.084513 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-public-tls-certs\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.084549 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-internal-tls-certs\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.084715 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-scripts\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.085968 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-combined-ca-bundle\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.086035 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-config-data\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.086152 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncnbq\" (UniqueName: \"kubernetes.io/projected/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-kube-api-access-ncnbq\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.085390 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-logs\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.091131 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-public-tls-certs\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.091764 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-combined-ca-bundle\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.092380 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-config-data\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.093107 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-scripts\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.093269 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-internal-tls-certs\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.109938 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncnbq\" (UniqueName: \"kubernetes.io/projected/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-kube-api-access-ncnbq\") pod \"placement-784897656b-2kp66\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.255792 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-784897656b-2kp66"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.674282 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.674378 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 13:54:07 crc kubenswrapper[4783]: I0930 13:54:07.812305 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6b7447c7-b05c-4a35-99a8-212d2fccfdbb","Type":"ContainerStarted","Data":"3fd1766ad78e24aa62e2b7df630fccd1e7b5440a6d739f5e33121a20d551fc99"}
Sep 30 13:54:08 crc kubenswrapper[4783]: I0930 13:54:08.058705 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5674f66f87-q5c7d"
Sep 30 13:54:08 crc kubenswrapper[4783]: I0930 13:54:08.125100 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p"]
Sep 30 13:54:08 crc kubenswrapper[4783]: I0930 13:54:08.125423 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" podUID="6b8e5aea-dccd-4876-8d92-3dab97f8db58" containerName="dnsmasq-dns" containerID="cri-o://8fb3b92cc631f3136e13ad20c5dbf5838c3909467c0aa5722a2fff2dcaa58088" gracePeriod=10
Sep 30 13:54:10 crc kubenswrapper[4783]: I0930 13:54:10.875319 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" podUID="6b8e5aea-dccd-4876-8d92-3dab97f8db58" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused"
Sep 30 13:54:11 crc kubenswrapper[4783]: W0930 13:54:11.780549 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdd4645f_8430_40ad_9539_663a01c74c13.slice/crio-61dda0969a0bd1ae539d326392eba358f098cad90af63c593dc98f7020813d11 WatchSource:0}: Error finding container 61dda0969a0bd1ae539d326392eba358f098cad90af63c593dc98f7020813d11: Status 404 returned error can't find the container with id 61dda0969a0bd1ae539d326392eba358f098cad90af63c593dc98f7020813d11
Sep 30 13:54:11 crc kubenswrapper[4783]: I0930 13:54:11.863870 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-84fcfd7bf5-qmzxl" event={"ID":"fdd4645f-8430-40ad-9539-663a01c74c13","Type":"ContainerStarted","Data":"61dda0969a0bd1ae539d326392eba358f098cad90af63c593dc98f7020813d11"}
Sep 30 13:54:12 crc kubenswrapper[4783]: I0930 13:54:12.282177 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-784897656b-2kp66"]
Sep 30 13:54:12 crc kubenswrapper[4783]: I0930 13:54:12.876353 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be403e4a-ed71-41b9-9c17-16a913ecbd8e","Type":"ContainerStarted","Data":"fb0c68da9baeada510a8e07bf009f63dec05e3b0fd0160cf0f2cd4356e1bafbf"}
Sep 30 13:54:12 crc kubenswrapper[4783]: I0930 13:54:12.878669 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6b7447c7-b05c-4a35-99a8-212d2fccfdbb","Type":"ContainerStarted","Data":"a3a24d4d11109348dc5aea74a514101dc4fd4971209e6891ef2e78c506c18182"}
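Note: the failing liveness probe above is an HTTP GET issued by the kubelet prober against the container's health endpoint; a refused connection (no listener on 127.0.0.1:8798) counts as a failure, as does a status outside the 200-399 range the kubelet accepts. A small Go sketch of that check, with the URL taken from the record:

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // probe performs an HTTP GET and reports failure on transport errors or on
    // a non-success status, mirroring the prober behaviour logged above.
    func probe(url string) error {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get(url)
        if err != nil {
            return err // e.g. "connect: connection refused", as in the record
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("probe failed: status %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        if err := probe("http://127.0.0.1:8798/health"); err != nil {
            fmt.Println("Probe failed:", err)
        }
    }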
Sep 30 13:54:12 crc kubenswrapper[4783]: I0930 13:54:12.880822 4783 generic.go:334] "Generic (PLEG): container finished" podID="6b8e5aea-dccd-4876-8d92-3dab97f8db58" containerID="8fb3b92cc631f3136e13ad20c5dbf5838c3909467c0aa5722a2fff2dcaa58088" exitCode=0
Sep 30 13:54:12 crc kubenswrapper[4783]: I0930 13:54:12.880855 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" event={"ID":"6b8e5aea-dccd-4876-8d92-3dab97f8db58","Type":"ContainerDied","Data":"8fb3b92cc631f3136e13ad20c5dbf5838c3909467c0aa5722a2fff2dcaa58088"}
Sep 30 13:54:15 crc kubenswrapper[4783]: I0930 13:54:15.919290 4783 generic.go:334] "Generic (PLEG): container finished" podID="c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672" containerID="850d2970bc8214d5e1d97a9ad7c93fc9c84be59211bd701c81e64b2ee2ab6b0e" exitCode=0
Sep 30 13:54:15 crc kubenswrapper[4783]: I0930 13:54:15.919386 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-dp2b8" event={"ID":"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672","Type":"ContainerDied","Data":"850d2970bc8214d5e1d97a9ad7c93fc9c84be59211bd701c81e64b2ee2ab6b0e"}
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.133336 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p"
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.204038 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-config\") pod \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") "
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.204135 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-ovsdbserver-sb\") pod \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") "
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.204165 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-dns-svc\") pod \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") "
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.204275 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-ovsdbserver-nb\") pod \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") "
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.204411 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hf7f9\" (UniqueName: \"kubernetes.io/projected/6b8e5aea-dccd-4876-8d92-3dab97f8db58-kube-api-access-hf7f9\") pod \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\" (UID: \"6b8e5aea-dccd-4876-8d92-3dab97f8db58\") "
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.208929 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b8e5aea-dccd-4876-8d92-3dab97f8db58-kube-api-access-hf7f9" (OuterVolumeSpecName: "kube-api-access-hf7f9") pod "6b8e5aea-dccd-4876-8d92-3dab97f8db58" (UID: "6b8e5aea-dccd-4876-8d92-3dab97f8db58"). InnerVolumeSpecName "kube-api-access-hf7f9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.255933 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6b8e5aea-dccd-4876-8d92-3dab97f8db58" (UID: "6b8e5aea-dccd-4876-8d92-3dab97f8db58"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.258467 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6b8e5aea-dccd-4876-8d92-3dab97f8db58" (UID: "6b8e5aea-dccd-4876-8d92-3dab97f8db58"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.260505 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6b8e5aea-dccd-4876-8d92-3dab97f8db58" (UID: "6b8e5aea-dccd-4876-8d92-3dab97f8db58"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.281444 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-config" (OuterVolumeSpecName: "config") pod "6b8e5aea-dccd-4876-8d92-3dab97f8db58" (UID: "6b8e5aea-dccd-4876-8d92-3dab97f8db58"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.307338 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hf7f9\" (UniqueName: \"kubernetes.io/projected/6b8e5aea-dccd-4876-8d92-3dab97f8db58-kube-api-access-hf7f9\") on node \"crc\" DevicePath \"\""
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.307391 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-config\") on node \"crc\" DevicePath \"\""
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.307413 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.307429 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.307445 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6b8e5aea-dccd-4876-8d92-3dab97f8db58-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.962203 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-784897656b-2kp66" event={"ID":"6b97c668-20f4-48a9-a8ef-f5878e6aa23f","Type":"ContainerStarted","Data":"480a200cdc697c8ebf1abd5b620ac48b7758d3e56bb2816ab4f7b5cd9d38f624"}
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.963968 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" event={"ID":"6b8e5aea-dccd-4876-8d92-3dab97f8db58","Type":"ContainerDied","Data":"c259dff0ca0f218b3cc59bca95b91e577164a746ddde2a06446d9c095a8af1f5"}
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.964000 4783 scope.go:117] "RemoveContainer" containerID="8fb3b92cc631f3136e13ad20c5dbf5838c3909467c0aa5722a2fff2dcaa58088"
Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.964151 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p"
Need to start a new one" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" Sep 30 13:54:19 crc kubenswrapper[4783]: I0930 13:54:19.998062 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p"] Sep 30 13:54:20 crc kubenswrapper[4783]: I0930 13:54:20.004609 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p"] Sep 30 13:54:20 crc kubenswrapper[4783]: E0930 13:54:20.786314 4783 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:e318869f706836a0c74c0ad55aab277b1bb7fae0555ae0f03cb28b379b9ce695" Sep 30 13:54:20 crc kubenswrapper[4783]: E0930 13:54:20.786784 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:e318869f706836a0c74c0ad55aab277b1bb7fae0555ae0f03cb28b379b9ce695,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cmkfs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-qcbqm_openstack(04087483-d2dd-4f70-99f1-592a46394263): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 13:54:20 crc kubenswrapper[4783]: E0930 13:54:20.788067 4783 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-qcbqm" podUID="04087483-d2dd-4f70-99f1-592a46394263" Sep 30 13:54:20 crc kubenswrapper[4783]: I0930 13:54:20.859962 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b8e5aea-dccd-4876-8d92-3dab97f8db58" path="/var/lib/kubelet/pods/6b8e5aea-dccd-4876-8d92-3dab97f8db58/volumes" Sep 30 13:54:20 crc kubenswrapper[4783]: I0930 13:54:20.876808 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6c6d5d5bd7-vrp8p" podUID="6b8e5aea-dccd-4876-8d92-3dab97f8db58" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: i/o timeout" Sep 30 13:54:20 crc kubenswrapper[4783]: E0930 13:54:20.975041 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:e318869f706836a0c74c0ad55aab277b1bb7fae0555ae0f03cb28b379b9ce695\\\"\"" pod="openstack/cinder-db-sync-qcbqm" podUID="04087483-d2dd-4f70-99f1-592a46394263" Sep 30 13:54:24 crc kubenswrapper[4783]: I0930 13:54:24.542411 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-dp2b8" Sep 30 13:54:24 crc kubenswrapper[4783]: I0930 13:54:24.623057 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlj4l\" (UniqueName: \"kubernetes.io/projected/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-kube-api-access-zlj4l\") pod \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\" (UID: \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\") " Sep 30 13:54:24 crc kubenswrapper[4783]: I0930 13:54:24.623115 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-combined-ca-bundle\") pod \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\" (UID: \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\") " Sep 30 13:54:24 crc kubenswrapper[4783]: I0930 13:54:24.623372 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-db-sync-config-data\") pod \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\" (UID: \"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672\") " Sep 30 13:54:24 crc kubenswrapper[4783]: I0930 13:54:24.631979 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672" (UID: "c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:24 crc kubenswrapper[4783]: I0930 13:54:24.634809 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-kube-api-access-zlj4l" (OuterVolumeSpecName: "kube-api-access-zlj4l") pod "c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672" (UID: "c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672"). InnerVolumeSpecName "kube-api-access-zlj4l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:54:24 crc kubenswrapper[4783]: I0930 13:54:24.664872 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672" (UID: "c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:24 crc kubenswrapper[4783]: I0930 13:54:24.725425 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlj4l\" (UniqueName: \"kubernetes.io/projected/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-kube-api-access-zlj4l\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:24 crc kubenswrapper[4783]: I0930 13:54:24.725475 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:24 crc kubenswrapper[4783]: I0930 13:54:24.725493 4783 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.020566 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-dp2b8" event={"ID":"c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672","Type":"ContainerDied","Data":"02ab9308f33292014be67b055763857cf95d31e2dee0e974c113876e2ed02c9f"} Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.020611 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02ab9308f33292014be67b055763857cf95d31e2dee0e974c113876e2ed02c9f" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.020914 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-dp2b8" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.821753 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-fb9cffd59-bwk45"] Sep 30 13:54:25 crc kubenswrapper[4783]: E0930 13:54:25.822460 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672" containerName="barbican-db-sync" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.822477 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672" containerName="barbican-db-sync" Sep 30 13:54:25 crc kubenswrapper[4783]: E0930 13:54:25.822490 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b8e5aea-dccd-4876-8d92-3dab97f8db58" containerName="dnsmasq-dns" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.822498 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b8e5aea-dccd-4876-8d92-3dab97f8db58" containerName="dnsmasq-dns" Sep 30 13:54:25 crc kubenswrapper[4783]: E0930 13:54:25.822516 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b8e5aea-dccd-4876-8d92-3dab97f8db58" containerName="init" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.822524 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b8e5aea-dccd-4876-8d92-3dab97f8db58" containerName="init" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.822727 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b8e5aea-dccd-4876-8d92-3dab97f8db58" containerName="dnsmasq-dns" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.822743 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672" containerName="barbican-db-sync" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.823795 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.826328 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-wckjk" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.826729 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.826920 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.890407 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-fb9cffd59-bwk45"] Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.926485 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-95bcf9466-5g2ds"] Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.927839 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.931790 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.937492 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c856dc5f9-xnnct"] Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.942387 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.945293 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-config-data\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.945733 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a19af6c-8b2e-41f3-ac68-012bd49e514b-logs\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.945784 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gd7v2\" (UniqueName: \"kubernetes.io/projected/2a19af6c-8b2e-41f3-ac68-012bd49e514b-kube-api-access-gd7v2\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.945828 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-config-data-custom\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.945895 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-combined-ca-bundle\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.948924 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-95bcf9466-5g2ds"] Sep 30 13:54:25 crc kubenswrapper[4783]: I0930 13:54:25.968770 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c856dc5f9-xnnct"] Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.033549 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-84fcfd7bf5-qmzxl" event={"ID":"fdd4645f-8430-40ad-9539-663a01c74c13","Type":"ContainerStarted","Data":"83c83549440f0e945a07ebe3f4406f83362c2602f75efd510a99b82f7f1c32fb"} Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.034568 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-84fcfd7bf5-qmzxl" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047356 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-combined-ca-bundle\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047448 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a19af6c-8b2e-41f3-ac68-012bd49e514b-logs\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047485 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-config-data\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047508 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xj6g8\" (UniqueName: \"kubernetes.io/projected/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-kube-api-access-xj6g8\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047541 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-config\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047557 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-ovsdbserver-sb\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047574 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gd7v2\" (UniqueName: \"kubernetes.io/projected/2a19af6c-8b2e-41f3-ac68-012bd49e514b-kube-api-access-gd7v2\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047589 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-logs\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047635 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-dns-svc\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047651 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-config-data-custom\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:26 crc 
kubenswrapper[4783]: I0930 13:54:26.047679 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-combined-ca-bundle\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047720 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-config-data\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047744 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgm92\" (UniqueName: \"kubernetes.io/projected/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-kube-api-access-tgm92\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047792 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-ovsdbserver-nb\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047813 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-config-data-custom\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.047869 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-dns-swift-storage-0\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.049389 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a19af6c-8b2e-41f3-ac68-012bd49e514b-logs\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.062432 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-combined-ca-bundle\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.062799 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-config-data\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " 
pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.066450 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-config-data-custom\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.084273 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7ff475c6b-qnjjk"] Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.084889 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gd7v2\" (UniqueName: \"kubernetes.io/projected/2a19af6c-8b2e-41f3-ac68-012bd49e514b-kube-api-access-gd7v2\") pod \"barbican-worker-fb9cffd59-bwk45\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.085741 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.088407 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.114268 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-84fcfd7bf5-qmzxl" podStartSLOduration=21.114241649 podStartE2EDuration="21.114241649s" podCreationTimestamp="2025-09-30 13:54:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:54:26.062238912 +0000 UTC m=+1165.993705219" watchObservedRunningTime="2025-09-30 13:54:26.114241649 +0000 UTC m=+1166.045707956" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.130112 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7ff475c6b-qnjjk"] Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149076 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-config-data-custom\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149145 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-dns-swift-storage-0\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149171 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-config-data-custom\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149231 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-combined-ca-bundle\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149265 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-config-data\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149282 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-config-data\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149303 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xj6g8\" (UniqueName: \"kubernetes.io/projected/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-kube-api-access-xj6g8\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149321 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-config\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149339 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-ovsdbserver-sb\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149355 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-logs\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149396 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-dns-svc\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149453 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-combined-ca-bundle\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149471 4783 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6d61ab4-825a-4fdc-a326-e56f2c72b857-logs\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149489 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgm92\" (UniqueName: \"kubernetes.io/projected/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-kube-api-access-tgm92\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149511 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nk8l\" (UniqueName: \"kubernetes.io/projected/a6d61ab4-825a-4fdc-a326-e56f2c72b857-kube-api-access-4nk8l\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.149557 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-ovsdbserver-nb\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.150308 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-ovsdbserver-nb\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.150496 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-dns-swift-storage-0\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.150935 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-dns-svc\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.151577 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-logs\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.152694 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-config\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.153402 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-config-data\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.153657 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-config-data-custom\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.154010 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-ovsdbserver-sb\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.155800 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.156039 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-combined-ca-bundle\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.164343 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgm92\" (UniqueName: \"kubernetes.io/projected/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-kube-api-access-tgm92\") pod \"dnsmasq-dns-5c856dc5f9-xnnct\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.166339 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xj6g8\" (UniqueName: \"kubernetes.io/projected/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-kube-api-access-xj6g8\") pod \"barbican-keystone-listener-95bcf9466-5g2ds\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.250925 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-config-data\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.251036 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-combined-ca-bundle\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.251071 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6d61ab4-825a-4fdc-a326-e56f2c72b857-logs\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: 
\"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.251095 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nk8l\" (UniqueName: \"kubernetes.io/projected/a6d61ab4-825a-4fdc-a326-e56f2c72b857-kube-api-access-4nk8l\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.251169 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-config-data-custom\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.251522 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6d61ab4-825a-4fdc-a326-e56f2c72b857-logs\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.254574 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-combined-ca-bundle\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.255200 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-config-data\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.257381 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-config-data-custom\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.259096 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.269034 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.269067 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nk8l\" (UniqueName: \"kubernetes.io/projected/a6d61ab4-825a-4fdc-a326-e56f2c72b857-kube-api-access-4nk8l\") pod \"barbican-api-7ff475c6b-qnjjk\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:26 crc kubenswrapper[4783]: I0930 13:54:26.514409 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.304526 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6d956c456d-krq7k"] Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.306813 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.309526 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.310728 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.321361 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6d956c456d-krq7k"] Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.392273 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-config-data\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.392542 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-public-tls-certs\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.392659 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f741556-230b-409c-b9bd-d0dc1abbcd77-logs\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.392733 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-combined-ca-bundle\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.392836 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mftgp\" (UniqueName: \"kubernetes.io/projected/1f741556-230b-409c-b9bd-d0dc1abbcd77-kube-api-access-mftgp\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.392993 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-config-data-custom\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.393065 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-internal-tls-certs\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.494704 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f741556-230b-409c-b9bd-d0dc1abbcd77-logs\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.494769 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-combined-ca-bundle\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.494809 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mftgp\" (UniqueName: \"kubernetes.io/projected/1f741556-230b-409c-b9bd-d0dc1abbcd77-kube-api-access-mftgp\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.494903 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-config-data-custom\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.494921 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-internal-tls-certs\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.494948 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-config-data\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.494962 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-public-tls-certs\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.495753 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f741556-230b-409c-b9bd-d0dc1abbcd77-logs\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.501067 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-combined-ca-bundle\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.501408 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-public-tls-certs\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.501831 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-config-data-custom\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.501965 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-internal-tls-certs\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.502387 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-config-data\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.522658 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mftgp\" (UniqueName: \"kubernetes.io/projected/1f741556-230b-409c-b9bd-d0dc1abbcd77-kube-api-access-mftgp\") pod \"barbican-api-6d956c456d-krq7k\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:28 crc kubenswrapper[4783]: I0930 13:54:28.635161 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:29 crc kubenswrapper[4783]: I0930 13:54:29.532489 4783 scope.go:117] "RemoveContainer" containerID="0eab26db4441a40d22cd716000814292f29d2fa07c4cf7152707175cdc6b30a8" Sep 30 13:54:29 crc kubenswrapper[4783]: E0930 13:54:29.563512 4783 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/sg-core@sha256:09b5017c95d7697e66b9c64846bc48ef5826a009cba89b956ec54561e5f4a2d1" Sep 30 13:54:29 crc kubenswrapper[4783]: E0930 13:54:29.563639 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:quay.io/openstack-k8s-operators/sg-core@sha256:09b5017c95d7697e66b9c64846bc48ef5826a009cba89b956ec54561e5f4a2d1,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:sg-core-conf-yaml,ReadOnly:false,MountPath:/etc/sg-core.conf.yaml,SubPath:sg-core.conf.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j2wr5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(8257f606-4b1c-46e2-918e-9ebf1128f6cc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 13:54:30 crc kubenswrapper[4783]: I0930 13:54:30.082123 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-784897656b-2kp66" event={"ID":"6b97c668-20f4-48a9-a8ef-f5878e6aa23f","Type":"ContainerStarted","Data":"8d42af40fdf6ba3fbff5a708098b5ffb7371fc3b476c8c5bc057e0dc5815233d"} Sep 30 13:54:30 crc kubenswrapper[4783]: I0930 13:54:30.188690 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6d956c456d-krq7k"] Sep 30 13:54:30 crc kubenswrapper[4783]: I0930 13:54:30.254969 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7ff475c6b-qnjjk"] Sep 30 13:54:30 crc kubenswrapper[4783]: I0930 13:54:30.259216 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-fb9cffd59-bwk45"] Sep 30 13:54:30 crc kubenswrapper[4783]: W0930 13:54:30.259865 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6d61ab4_825a_4fdc_a326_e56f2c72b857.slice/crio-4b702f7cd1cfe698857c7c843fcc4375ff810fcc54eef3b726b3a94bc91fbd7f WatchSource:0}: Error finding container 4b702f7cd1cfe698857c7c843fcc4375ff810fcc54eef3b726b3a94bc91fbd7f: Status 404 returned error can't find the container with id 4b702f7cd1cfe698857c7c843fcc4375ff810fcc54eef3b726b3a94bc91fbd7f Sep 30 13:54:30 crc 
kubenswrapper[4783]: W0930 13:54:30.260100 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a19af6c_8b2e_41f3_ac68_012bd49e514b.slice/crio-21b8bdd6e308f53994dc3bd9b1fb5b814d96e91bb0b8c98d41fa69c795997897 WatchSource:0}: Error finding container 21b8bdd6e308f53994dc3bd9b1fb5b814d96e91bb0b8c98d41fa69c795997897: Status 404 returned error can't find the container with id 21b8bdd6e308f53994dc3bd9b1fb5b814d96e91bb0b8c98d41fa69c795997897 Sep 30 13:54:30 crc kubenswrapper[4783]: I0930 13:54:30.336338 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c856dc5f9-xnnct"] Sep 30 13:54:30 crc kubenswrapper[4783]: W0930 13:54:30.344413 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0183b5a_e3e9_4689_ab48_ea5b73e0bd60.slice/crio-801f9ce7e6a71de4e8305af57adad8f4501f2a1bbbe2a38fc0dbb1d08f25fa26 WatchSource:0}: Error finding container 801f9ce7e6a71de4e8305af57adad8f4501f2a1bbbe2a38fc0dbb1d08f25fa26: Status 404 returned error can't find the container with id 801f9ce7e6a71de4e8305af57adad8f4501f2a1bbbe2a38fc0dbb1d08f25fa26 Sep 30 13:54:30 crc kubenswrapper[4783]: I0930 13:54:30.405194 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-95bcf9466-5g2ds"] Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.151811 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-784897656b-2kp66" event={"ID":"6b97c668-20f4-48a9-a8ef-f5878e6aa23f","Type":"ContainerStarted","Data":"8df62f6d21d21c10de4af33338f5e6aaa9331745b50e08b8d0f63b05fcdf0a2f"} Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.153199 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-784897656b-2kp66" Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.153275 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-784897656b-2kp66" Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.165029 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be403e4a-ed71-41b9-9c17-16a913ecbd8e","Type":"ContainerStarted","Data":"c4c2d396cb8c402c092c9d624b22bee0110dc4673e91dc69423eba5248d63bde"} Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.178195 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7ff475c6b-qnjjk" event={"ID":"a6d61ab4-825a-4fdc-a326-e56f2c72b857","Type":"ContainerStarted","Data":"ccc560ee8b7452abbe90f3abf8c9d54c86e65c7a9a1531ef75e232716e377ef8"} Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.178244 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7ff475c6b-qnjjk" event={"ID":"a6d61ab4-825a-4fdc-a326-e56f2c72b857","Type":"ContainerStarted","Data":"79d24a63ac0119b9fc4aad736a0d1b85827096188af043727ed0edc34743a6c0"} Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.178257 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7ff475c6b-qnjjk" event={"ID":"a6d61ab4-825a-4fdc-a326-e56f2c72b857","Type":"ContainerStarted","Data":"4b702f7cd1cfe698857c7c843fcc4375ff810fcc54eef3b726b3a94bc91fbd7f"} Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.178289 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 
13:54:31.178819 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.191454 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" event={"ID":"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d","Type":"ContainerStarted","Data":"4f83f9799c9d4c825935802dd1aca89dedb70560c22c76d5f67113a5905b98c8"} Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.198878 4783 generic.go:334] "Generic (PLEG): container finished" podID="a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" containerID="c1ed12a1abcc52f48cf0000931ebf5d559373be49d418f4dc5cb4bb23ac3e350" exitCode=0 Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.198948 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" event={"ID":"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60","Type":"ContainerDied","Data":"c1ed12a1abcc52f48cf0000931ebf5d559373be49d418f4dc5cb4bb23ac3e350"} Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.198976 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" event={"ID":"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60","Type":"ContainerStarted","Data":"801f9ce7e6a71de4e8305af57adad8f4501f2a1bbbe2a38fc0dbb1d08f25fa26"} Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.202492 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-784897656b-2kp66" podStartSLOduration=25.202467593 podStartE2EDuration="25.202467593s" podCreationTimestamp="2025-09-30 13:54:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:54:31.173893168 +0000 UTC m=+1171.105359475" watchObservedRunningTime="2025-09-30 13:54:31.202467593 +0000 UTC m=+1171.133933910" Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.208865 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=27.208847758 podStartE2EDuration="27.208847758s" podCreationTimestamp="2025-09-30 13:54:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:54:31.199762107 +0000 UTC m=+1171.131228424" watchObservedRunningTime="2025-09-30 13:54:31.208847758 +0000 UTC m=+1171.140314055" Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.218484 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6b7447c7-b05c-4a35-99a8-212d2fccfdbb","Type":"ContainerStarted","Data":"efe36767497a9fc9e028d17967e730c37c0c7d9e9dda36bf40da68bab84aeea1"} Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.225572 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-fb9cffd59-bwk45" event={"ID":"2a19af6c-8b2e-41f3-ac68-012bd49e514b","Type":"ContainerStarted","Data":"21b8bdd6e308f53994dc3bd9b1fb5b814d96e91bb0b8c98d41fa69c795997897"} Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.229904 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7ff475c6b-qnjjk" podStartSLOduration=5.229884122 podStartE2EDuration="5.229884122s" podCreationTimestamp="2025-09-30 13:54:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-09-30 13:54:31.222492125 +0000 UTC m=+1171.153958432" watchObservedRunningTime="2025-09-30 13:54:31.229884122 +0000 UTC m=+1171.161350429" Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.235353 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d956c456d-krq7k" event={"ID":"1f741556-230b-409c-b9bd-d0dc1abbcd77","Type":"ContainerStarted","Data":"9df990f3d0bfc3752e7528f602f6eb747222541c98468b6d5a79a895a7dfc8ca"} Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.235409 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d956c456d-krq7k" event={"ID":"1f741556-230b-409c-b9bd-d0dc1abbcd77","Type":"ContainerStarted","Data":"083c5d84dc5f5d23c7a9e1a3414dc17b7d8a4bbc02117dde2f658c66f9ee7b94"} Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.235419 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d956c456d-krq7k" event={"ID":"1f741556-230b-409c-b9bd-d0dc1abbcd77","Type":"ContainerStarted","Data":"971b451a27e2cbdaac041ab9142479750b02d192361ae0b52c946286309ed3e5"} Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.235819 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.235940 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.274725 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=26.274592814000002 podStartE2EDuration="26.274592814s" podCreationTimestamp="2025-09-30 13:54:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:54:31.265824693 +0000 UTC m=+1171.197291130" watchObservedRunningTime="2025-09-30 13:54:31.274592814 +0000 UTC m=+1171.206059121" Sep 30 13:54:31 crc kubenswrapper[4783]: I0930 13:54:31.313100 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6d956c456d-krq7k" podStartSLOduration=3.313077338 podStartE2EDuration="3.313077338s" podCreationTimestamp="2025-09-30 13:54:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:54:31.302078245 +0000 UTC m=+1171.233544572" watchObservedRunningTime="2025-09-30 13:54:31.313077338 +0000 UTC m=+1171.244543645" Sep 30 13:54:32 crc kubenswrapper[4783]: I0930 13:54:32.254540 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" event={"ID":"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60","Type":"ContainerStarted","Data":"a33ca8dc7a63d510ef6c423ebc55a0e0ffa23772903b86590963b73a84a9d33f"} Sep 30 13:54:32 crc kubenswrapper[4783]: I0930 13:54:32.255459 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:32 crc kubenswrapper[4783]: I0930 13:54:32.275989 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" podStartSLOduration=7.275970364 podStartE2EDuration="7.275970364s" podCreationTimestamp="2025-09-30 13:54:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 
13:54:32.268330489 +0000 UTC m=+1172.199796796" watchObservedRunningTime="2025-09-30 13:54:32.275970364 +0000 UTC m=+1172.207436671" Sep 30 13:54:35 crc kubenswrapper[4783]: I0930 13:54:35.189569 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 13:54:35 crc kubenswrapper[4783]: I0930 13:54:35.190100 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 13:54:35 crc kubenswrapper[4783]: I0930 13:54:35.190119 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 13:54:35 crc kubenswrapper[4783]: I0930 13:54:35.190131 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 13:54:35 crc kubenswrapper[4783]: I0930 13:54:35.234120 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 13:54:35 crc kubenswrapper[4783]: I0930 13:54:35.253167 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 13:54:36 crc kubenswrapper[4783]: I0930 13:54:36.216579 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 13:54:36 crc kubenswrapper[4783]: I0930 13:54:36.216632 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 13:54:36 crc kubenswrapper[4783]: I0930 13:54:36.216916 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 13:54:36 crc kubenswrapper[4783]: I0930 13:54:36.216963 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 13:54:36 crc kubenswrapper[4783]: I0930 13:54:36.256599 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 13:54:36 crc kubenswrapper[4783]: I0930 13:54:36.271323 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 13:54:36 crc kubenswrapper[4783]: I0930 13:54:36.282992 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:54:36 crc kubenswrapper[4783]: I0930 13:54:36.367437 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5674f66f87-q5c7d"] Sep 30 13:54:36 crc kubenswrapper[4783]: I0930 13:54:36.367690 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" podUID="435b0af3-e4bc-417f-a517-03c12786cee4" containerName="dnsmasq-dns" containerID="cri-o://7529745251e90cc3938c592e0145e0f316a4a387a8df7fcf029dedb6c177263b" gracePeriod=10 Sep 30 13:54:37 crc kubenswrapper[4783]: I0930 13:54:37.243599 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 13:54:37 crc kubenswrapper[4783]: I0930 13:54:37.303860 4783 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 13:54:37 crc kubenswrapper[4783]: I0930 13:54:37.437764 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 13:54:37 crc 
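The "Killing container with a grace period" entry above (gracePeriod=10 for dnsmasq-dns) names the standard termination contract: a polite stop signal first, a hard kill only if the process outlives the grace period. A sketch of that contract under stated assumptions follows — a plain local process stands in for the container; this is not kubelet or CRI code.

package main

import (
    "fmt"
    "os/exec"
    "syscall"
    "time"
)

func main() {
    cmd := exec.Command("sleep", "300") // stand-in for the container process
    if err := cmd.Start(); err != nil {
        panic(err)
    }
    done := make(chan error, 1)
    go func() { done <- cmd.Wait() }()

    cmd.Process.Signal(syscall.SIGTERM) // polite stop request
    select {
    case err := <-done:
        fmt.Println("exited within grace period:", err)
    case <-time.After(10 * time.Second): // gracePeriod=10, as in the entry above
        cmd.Process.Kill() // SIGKILL once the budget lapses
        fmt.Println("grace period expired, killed:", <-done)
    }
}

The dnsmasq-dns container exits 0 about two seconds after the kill is issued (see the ContainerDied entry below), well inside its 10s budget, so only the SIGTERM path is exercised here.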
kubenswrapper[4783]: I0930 13:54:37.595469 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-7ff475c6b-qnjjk" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 13:54:37 crc kubenswrapper[4783]: I0930 13:54:37.678360 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:54:37 crc kubenswrapper[4783]: I0930 13:54:37.678426 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:54:37 crc kubenswrapper[4783]: I0930 13:54:37.964671 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:38 crc kubenswrapper[4783]: I0930 13:54:38.057896 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" podUID="435b0af3-e4bc-417f-a517-03c12786cee4" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.145:5353: connect: connection refused" Sep 30 13:54:38 crc kubenswrapper[4783]: I0930 13:54:38.232939 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:38 crc kubenswrapper[4783]: I0930 13:54:38.313205 4783 generic.go:334] "Generic (PLEG): container finished" podID="435b0af3-e4bc-417f-a517-03c12786cee4" containerID="7529745251e90cc3938c592e0145e0f316a4a387a8df7fcf029dedb6c177263b" exitCode=0 Sep 30 13:54:38 crc kubenswrapper[4783]: I0930 13:54:38.313255 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" event={"ID":"435b0af3-e4bc-417f-a517-03c12786cee4","Type":"ContainerDied","Data":"7529745251e90cc3938c592e0145e0f316a4a387a8df7fcf029dedb6c177263b"} Sep 30 13:54:38 crc kubenswrapper[4783]: I0930 13:54:38.414831 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-84fcfd7bf5-qmzxl" Sep 30 13:54:38 crc kubenswrapper[4783]: I0930 13:54:38.743163 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 13:54:38 crc kubenswrapper[4783]: I0930 13:54:38.744009 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 13:54:39 crc kubenswrapper[4783]: I0930 13:54:39.508716 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-784897656b-2kp66" Sep 30 13:54:39 crc kubenswrapper[4783]: I0930 13:54:39.564569 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-784897656b-2kp66" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.244534 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.247463 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.249887 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-rfwtt" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.250720 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.250997 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.255310 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.353994 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0b4989ff-7ff2-489c-898c-8a0adc064c01-openstack-config-secret\") pod \"openstackclient\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.354324 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6v7sm\" (UniqueName: \"kubernetes.io/projected/0b4989ff-7ff2-489c-898c-8a0adc064c01-kube-api-access-6v7sm\") pod \"openstackclient\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.354364 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0b4989ff-7ff2-489c-898c-8a0adc064c01-openstack-config\") pod \"openstackclient\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.354445 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b4989ff-7ff2-489c-898c-8a0adc064c01-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.441239 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Sep 30 13:54:40 crc kubenswrapper[4783]: E0930 13:54:40.441897 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle kube-api-access-6v7sm openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/openstackclient" podUID="0b4989ff-7ff2-489c-898c-8a0adc064c01" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.450392 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.456274 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b4989ff-7ff2-489c-898c-8a0adc064c01-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.456440 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/0b4989ff-7ff2-489c-898c-8a0adc064c01-openstack-config-secret\") pod \"openstackclient\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.456462 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v7sm\" (UniqueName: \"kubernetes.io/projected/0b4989ff-7ff2-489c-898c-8a0adc064c01-kube-api-access-6v7sm\") pod \"openstackclient\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.456486 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0b4989ff-7ff2-489c-898c-8a0adc064c01-openstack-config\") pod \"openstackclient\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.457913 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0b4989ff-7ff2-489c-898c-8a0adc064c01-openstack-config\") pod \"openstackclient\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: E0930 13:54:40.458891 4783 projected.go:194] Error preparing data for projected volume kube-api-access-6v7sm for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: User "system:node:crc" cannot create resource "serviceaccounts/token" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Sep 30 13:54:40 crc kubenswrapper[4783]: E0930 13:54:40.458974 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0b4989ff-7ff2-489c-898c-8a0adc064c01-kube-api-access-6v7sm podName:0b4989ff-7ff2-489c-898c-8a0adc064c01 nodeName:}" failed. No retries permitted until 2025-09-30 13:54:40.958937951 +0000 UTC m=+1180.890404258 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-6v7sm" (UniqueName: "kubernetes.io/projected/0b4989ff-7ff2-489c-898c-8a0adc064c01-kube-api-access-6v7sm") pod "openstackclient" (UID: "0b4989ff-7ff2-489c-898c-8a0adc064c01") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: User "system:node:crc" cannot create resource "serviceaccounts/token" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.463058 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b4989ff-7ff2-489c-898c-8a0adc064c01-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.465595 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0b4989ff-7ff2-489c-898c-8a0adc064c01-openstack-config-secret\") pod \"openstackclient\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.510619 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.512816 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.523390 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.659869 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05290e3e-89c9-4073-96b6-e97a289f4431-combined-ca-bundle\") pod \"openstackclient\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.659959 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/05290e3e-89c9-4073-96b6-e97a289f4431-openstack-config\") pod \"openstackclient\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.660003 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/05290e3e-89c9-4073-96b6-e97a289f4431-openstack-config-secret\") pod \"openstackclient\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.660036 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlmc5\" (UniqueName: \"kubernetes.io/projected/05290e3e-89c9-4073-96b6-e97a289f4431-kube-api-access-hlmc5\") pod \"openstackclient\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.761473 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/05290e3e-89c9-4073-96b6-e97a289f4431-openstack-config\") pod \"openstackclient\" (UID: 
\"05290e3e-89c9-4073-96b6-e97a289f4431\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.761545 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/05290e3e-89c9-4073-96b6-e97a289f4431-openstack-config-secret\") pod \"openstackclient\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.761577 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlmc5\" (UniqueName: \"kubernetes.io/projected/05290e3e-89c9-4073-96b6-e97a289f4431-kube-api-access-hlmc5\") pod \"openstackclient\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.761650 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05290e3e-89c9-4073-96b6-e97a289f4431-combined-ca-bundle\") pod \"openstackclient\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.762317 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/05290e3e-89c9-4073-96b6-e97a289f4431-openstack-config\") pod \"openstackclient\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.768735 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/05290e3e-89c9-4073-96b6-e97a289f4431-openstack-config-secret\") pod \"openstackclient\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.776915 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05290e3e-89c9-4073-96b6-e97a289f4431-combined-ca-bundle\") pod \"openstackclient\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.780785 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlmc5\" (UniqueName: \"kubernetes.io/projected/05290e3e-89c9-4073-96b6-e97a289f4431-kube-api-access-hlmc5\") pod \"openstackclient\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.877750 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: I0930 13:54:40.964384 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v7sm\" (UniqueName: \"kubernetes.io/projected/0b4989ff-7ff2-489c-898c-8a0adc064c01-kube-api-access-6v7sm\") pod \"openstackclient\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " pod="openstack/openstackclient" Sep 30 13:54:40 crc kubenswrapper[4783]: E0930 13:54:40.974122 4783 projected.go:194] Error preparing data for projected volume kube-api-access-6v7sm for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (0b4989ff-7ff2-489c-898c-8a0adc064c01) does not match the UID in record. The object might have been deleted and then recreated Sep 30 13:54:40 crc kubenswrapper[4783]: E0930 13:54:40.974493 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0b4989ff-7ff2-489c-898c-8a0adc064c01-kube-api-access-6v7sm podName:0b4989ff-7ff2-489c-898c-8a0adc064c01 nodeName:}" failed. No retries permitted until 2025-09-30 13:54:41.974473077 +0000 UTC m=+1181.905939384 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-6v7sm" (UniqueName: "kubernetes.io/projected/0b4989ff-7ff2-489c-898c-8a0adc064c01-kube-api-access-6v7sm") pod "openstackclient" (UID: "0b4989ff-7ff2-489c-898c-8a0adc064c01") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (0b4989ff-7ff2-489c-898c-8a0adc064c01) does not match the UID in record. The object might have been deleted and then recreated Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.343128 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.353784 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.357413 4783 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="0b4989ff-7ff2-489c-898c-8a0adc064c01" podUID="05290e3e-89c9-4073-96b6-e97a289f4431" Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.472876 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b4989ff-7ff2-489c-898c-8a0adc064c01-combined-ca-bundle\") pod \"0b4989ff-7ff2-489c-898c-8a0adc064c01\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.472966 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0b4989ff-7ff2-489c-898c-8a0adc064c01-openstack-config-secret\") pod \"0b4989ff-7ff2-489c-898c-8a0adc064c01\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.473048 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0b4989ff-7ff2-489c-898c-8a0adc064c01-openstack-config\") pod \"0b4989ff-7ff2-489c-898c-8a0adc064c01\" (UID: \"0b4989ff-7ff2-489c-898c-8a0adc064c01\") " Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.473614 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6v7sm\" (UniqueName: \"kubernetes.io/projected/0b4989ff-7ff2-489c-898c-8a0adc064c01-kube-api-access-6v7sm\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.473795 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b4989ff-7ff2-489c-898c-8a0adc064c01-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "0b4989ff-7ff2-489c-898c-8a0adc064c01" (UID: "0b4989ff-7ff2-489c-898c-8a0adc064c01"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.490834 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b4989ff-7ff2-489c-898c-8a0adc064c01-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "0b4989ff-7ff2-489c-898c-8a0adc064c01" (UID: "0b4989ff-7ff2-489c-898c-8a0adc064c01"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.491548 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b4989ff-7ff2-489c-898c-8a0adc064c01-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b4989ff-7ff2-489c-898c-8a0adc064c01" (UID: "0b4989ff-7ff2-489c-898c-8a0adc064c01"). InnerVolumeSpecName "combined-ca-bundle". 
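The token error and the status_manager entry above are the same lesson from two components: the name openstack/openstackclient survived the delete/recreate, but the UID changed from 0b4989ff-… to 05290e3e-…, so a token request bound to the old UID is refused and the stale status update is dropped. Pods are identified by UID, not name; a minimal sketch of the check, with a hypothetical record type:

package main

import "fmt"

// podRecord is a hypothetical stand-in for a cached pod identity.
type podRecord struct{ Namespace, Name, UID string }

func sameInstance(a, b podRecord) bool {
    return a.Namespace == b.Namespace && a.Name == b.Name && a.UID == b.UID
}

func main() {
    old := podRecord{"openstack", "openstackclient", "0b4989ff-7ff2-489c-898c-8a0adc064c01"}
    cur := podRecord{"openstack", "openstackclient", "05290e3e-89c9-4073-96b6-e97a289f4431"}
    if !sameInstance(old, cur) {
        fmt.Println("pod was deleted and then recreated, skipping status update")
    }
}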
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.574674 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b4989ff-7ff2-489c-898c-8a0adc064c01-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.574712 4783 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0b4989ff-7ff2-489c-898c-8a0adc064c01-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.574723 4783 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0b4989ff-7ff2-489c-898c-8a0adc064c01-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.771763 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.795277 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.857947 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7ff475c6b-qnjjk"] Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.858167 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7ff475c6b-qnjjk" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api-log" containerID="cri-o://79d24a63ac0119b9fc4aad736a0d1b85827096188af043727ed0edc34743a6c0" gracePeriod=30 Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.858307 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7ff475c6b-qnjjk" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api" containerID="cri-o://ccc560ee8b7452abbe90f3abf8c9d54c86e65c7a9a1531ef75e232716e377ef8" gracePeriod=30 Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.890107 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7ff475c6b-qnjjk" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": EOF" Sep 30 13:54:41 crc kubenswrapper[4783]: I0930 13:54:41.890373 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-7ff475c6b-qnjjk" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": EOF" Sep 30 13:54:42 crc kubenswrapper[4783]: I0930 13:54:42.352951 4783 generic.go:334] "Generic (PLEG): container finished" podID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerID="79d24a63ac0119b9fc4aad736a0d1b85827096188af043727ed0edc34743a6c0" exitCode=143 Sep 30 13:54:42 crc kubenswrapper[4783]: I0930 13:54:42.353054 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7ff475c6b-qnjjk" event={"ID":"a6d61ab4-825a-4fdc-a326-e56f2c72b857","Type":"ContainerDied","Data":"79d24a63ac0119b9fc4aad736a0d1b85827096188af043727ed0edc34743a6c0"} Sep 30 13:54:42 crc kubenswrapper[4783]: I0930 13:54:42.353580 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 13:54:42 crc kubenswrapper[4783]: I0930 13:54:42.365204 4783 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="0b4989ff-7ff2-489c-898c-8a0adc064c01" podUID="05290e3e-89c9-4073-96b6-e97a289f4431" Sep 30 13:54:42 crc kubenswrapper[4783]: E0930 13:54:42.590177 4783 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/ubi9/httpd-24@sha256:e91d58021b54c46883595ff66be65882de54abdb3be2ca53c4162b20d18b5f48" Sep 30 13:54:42 crc kubenswrapper[4783]: E0930 13:54:42.590548 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:proxy-httpd,Image:registry.redhat.io/ubi9/httpd-24@sha256:e91d58021b54c46883595ff66be65882de54abdb3be2ca53c4162b20d18b5f48,Command:[/usr/sbin/httpd],Args:[-DFOREGROUND],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:proxy-httpd,HostPort:0,ContainerPort:3000,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf/httpd.conf,SubPath:httpd.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf.d/ssl.conf,SubPath:ssl.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run-httpd,ReadOnly:false,MountPath:/run/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log-httpd,ReadOnly:false,MountPath:/var/log/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j2wr5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(8257f606-4b1c-46e2-918e-9ebf1128f6cc): ErrImagePull: rpc error: code = Canceled desc = copying system image from 
manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 13:54:42 crc kubenswrapper[4783]: E0930 13:54:42.591851 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"proxy-httpd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"]" pod="openstack/ceilometer-0" podUID="8257f606-4b1c-46e2-918e-9ebf1128f6cc" Sep 30 13:54:42 crc kubenswrapper[4783]: I0930 13:54:42.868434 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b4989ff-7ff2-489c-898c-8a0adc064c01" path="/var/lib/kubelet/pods/0b4989ff-7ff2-489c-898c-8a0adc064c01/volumes" Sep 30 13:54:42 crc kubenswrapper[4783]: I0930 13:54:42.915646 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.013635 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-dns-svc\") pod \"435b0af3-e4bc-417f-a517-03c12786cee4\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.014017 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-dns-swift-storage-0\") pod \"435b0af3-e4bc-417f-a517-03c12786cee4\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.014079 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-586sq\" (UniqueName: \"kubernetes.io/projected/435b0af3-e4bc-417f-a517-03c12786cee4-kube-api-access-586sq\") pod \"435b0af3-e4bc-417f-a517-03c12786cee4\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.014107 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-ovsdbserver-sb\") pod \"435b0af3-e4bc-417f-a517-03c12786cee4\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.014139 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-ovsdbserver-nb\") pod \"435b0af3-e4bc-417f-a517-03c12786cee4\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.014205 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-config\") pod \"435b0af3-e4bc-417f-a517-03c12786cee4\" (UID: \"435b0af3-e4bc-417f-a517-03c12786cee4\") " Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.025887 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/435b0af3-e4bc-417f-a517-03c12786cee4-kube-api-access-586sq" (OuterVolumeSpecName: "kube-api-access-586sq") pod "435b0af3-e4bc-417f-a517-03c12786cee4" (UID: "435b0af3-e4bc-417f-a517-03c12786cee4"). 
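The PullImage failure above ("rpc error: code = Canceled … context canceled") is not a registry problem: ceilometer-0 was being torn down, the context its sync ran under was cancelled, and the in-flight image copy surfaced that cancellation through gRPC. A minimal reproduction of the pattern, with made-up names and timings:

package main

import (
    "context"
    "errors"
    "fmt"
    "time"
)

// pullImage stands in for a CRI ImagePull call that honors ctx.
func pullImage(ctx context.Context, image string) error {
    select {
    case <-time.After(5 * time.Second): // stand-in for a slow registry copy
        return nil
    case <-ctx.Done():
        return fmt.Errorf("copying config: %w", ctx.Err())
    }
}

func main() {
    ctx, cancel := context.WithCancel(context.Background())
    go func() { time.Sleep(100 * time.Millisecond); cancel() }() // pod deleted mid-pull
    err := pullImage(ctx, "registry.redhat.io/ubi9/httpd-24")
    fmt.Println(err, "| canceled:", errors.Is(err, context.Canceled))
}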
InnerVolumeSpecName "kube-api-access-586sq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.081453 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "435b0af3-e4bc-417f-a517-03c12786cee4" (UID: "435b0af3-e4bc-417f-a517-03c12786cee4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.116691 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "435b0af3-e4bc-417f-a517-03c12786cee4" (UID: "435b0af3-e4bc-417f-a517-03c12786cee4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.117947 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-586sq\" (UniqueName: \"kubernetes.io/projected/435b0af3-e4bc-417f-a517-03c12786cee4-kube-api-access-586sq\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.117987 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.118015 4783 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.152255 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.155534 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "435b0af3-e4bc-417f-a517-03c12786cee4" (UID: "435b0af3-e4bc-417f-a517-03c12786cee4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.167605 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-config" (OuterVolumeSpecName: "config") pod "435b0af3-e4bc-417f-a517-03c12786cee4" (UID: "435b0af3-e4bc-417f-a517-03c12786cee4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.168877 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "435b0af3-e4bc-417f-a517-03c12786cee4" (UID: "435b0af3-e4bc-417f-a517-03c12786cee4"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.220127 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.220169 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.220183 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/435b0af3-e4bc-417f-a517-03c12786cee4-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.362913 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"05290e3e-89c9-4073-96b6-e97a289f4431","Type":"ContainerStarted","Data":"1e3d2272e6cccf21a8922739e5559b0ad79c33d15bbbee1eb77222e1d2628c14"} Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.364417 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" event={"ID":"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d","Type":"ContainerStarted","Data":"7cff1c12b1b0b2c4dcf219452ffc056adaceae66605a198cc4bdd76b90770222"} Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.364449 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" event={"ID":"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d","Type":"ContainerStarted","Data":"7fbe807e4a69a2c6466e2d4a52f57888f0a5143866c142a93efd093f14764a7a"} Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.366132 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" event={"ID":"435b0af3-e4bc-417f-a517-03c12786cee4","Type":"ContainerDied","Data":"cc595e34910e5207b50fa76818c55f6300cb6a031a23f4bf072d265db73b5bb7"} Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.366157 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5674f66f87-q5c7d" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.366162 4783 scope.go:117] "RemoveContainer" containerID="7529745251e90cc3938c592e0145e0f316a4a387a8df7fcf029dedb6c177263b" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.367710 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-fb9cffd59-bwk45" event={"ID":"2a19af6c-8b2e-41f3-ac68-012bd49e514b","Type":"ContainerStarted","Data":"433d915c50b0d185b319c45fc33233e3e3c0ab13a0ec0a6aef298225900bcb06"} Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.367736 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-fb9cffd59-bwk45" event={"ID":"2a19af6c-8b2e-41f3-ac68-012bd49e514b","Type":"ContainerStarted","Data":"60da69babbe7c125d7aac96c30abeffc6a81804a11c64e08329bc23563951526"} Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.367785 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8257f606-4b1c-46e2-918e-9ebf1128f6cc" containerName="ceilometer-notification-agent" containerID="cri-o://a292c102416e551d8b6ab46833ffcf17c8dc0518b4802aea9fda0c0279f9dbfa" gracePeriod=30 Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.367777 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8257f606-4b1c-46e2-918e-9ebf1128f6cc" containerName="ceilometer-central-agent" containerID="cri-o://890c5af0166010b924f1827b8ebfc5b1431692aedf4260f23c576544efc94058" gracePeriod=30 Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.390130 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" podStartSLOduration=15.97268973 podStartE2EDuration="18.390109914s" podCreationTimestamp="2025-09-30 13:54:25 +0000 UTC" firstStartedPulling="2025-09-30 13:54:30.423665024 +0000 UTC m=+1170.355131331" lastFinishedPulling="2025-09-30 13:54:32.841085218 +0000 UTC m=+1172.772551515" observedRunningTime="2025-09-30 13:54:43.386356643 +0000 UTC m=+1183.317822950" watchObservedRunningTime="2025-09-30 13:54:43.390109914 +0000 UTC m=+1183.321576221" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.399602 4783 scope.go:117] "RemoveContainer" containerID="4e4c72cb57a3437b4f2c65d5e2a4bdc02b29584b0bd555a7d8a99d90c5971373" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.409396 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-fb9cffd59-bwk45" podStartSLOduration=15.835126923 podStartE2EDuration="18.409378311s" podCreationTimestamp="2025-09-30 13:54:25 +0000 UTC" firstStartedPulling="2025-09-30 13:54:30.262600384 +0000 UTC m=+1170.194066691" lastFinishedPulling="2025-09-30 13:54:32.836851772 +0000 UTC m=+1172.768318079" observedRunningTime="2025-09-30 13:54:43.404641909 +0000 UTC m=+1183.336108216" watchObservedRunningTime="2025-09-30 13:54:43.409378311 +0000 UTC m=+1183.340844618" Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.428370 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5674f66f87-q5c7d"] Sep 30 13:54:43 crc kubenswrapper[4783]: I0930 13:54:43.435209 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5674f66f87-q5c7d"] Sep 30 13:54:44 crc kubenswrapper[4783]: I0930 13:54:44.408895 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qcbqm" 
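The pod_startup_latency_tracker entries report two durations: podStartE2EDuration (time from pod creation until it is observed running) and podStartSLOduration (the same, minus time spent pulling images, which the SLO excludes). The arithmetic checks out against the barbican-keystone-listener entry above; this sketch replays it with the exact timestamps from that entry:

package main

import (
    "fmt"
    "time"
)

func main() {
    const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    parse := func(s string) time.Time {
        t, err := time.Parse(layout, s)
        if err != nil {
            panic(err)
        }
        return t
    }
    created := parse("2025-09-30 13:54:25 +0000 UTC")
    firstPull := parse("2025-09-30 13:54:30.423665024 +0000 UTC")
    lastPull := parse("2025-09-30 13:54:32.841085218 +0000 UTC")
    observed := parse("2025-09-30 13:54:43.390109914 +0000 UTC")

    e2e := observed.Sub(created)
    slo := e2e - lastPull.Sub(firstPull) // pull time does not count against the SLO
    fmt.Println("podStartE2EDuration:", e2e) // 18.390109914s
    fmt.Println("podStartSLOduration:", slo) // ~15.9726897s
}

For pods whose images were already present (firstStartedPulling left at the zero time 0001-01-01), the two durations are equal, as in the placement and glance entries earlier in this log.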
event={"ID":"04087483-d2dd-4f70-99f1-592a46394263","Type":"ContainerStarted","Data":"1022d5e424762d7e29acf4475bea4e13229a028b0e4feddd4ba6cfac7675e760"} Sep 30 13:54:44 crc kubenswrapper[4783]: I0930 13:54:44.417463 4783 generic.go:334] "Generic (PLEG): container finished" podID="8257f606-4b1c-46e2-918e-9ebf1128f6cc" containerID="890c5af0166010b924f1827b8ebfc5b1431692aedf4260f23c576544efc94058" exitCode=0 Sep 30 13:54:44 crc kubenswrapper[4783]: I0930 13:54:44.417853 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8257f606-4b1c-46e2-918e-9ebf1128f6cc","Type":"ContainerDied","Data":"890c5af0166010b924f1827b8ebfc5b1431692aedf4260f23c576544efc94058"} Sep 30 13:54:44 crc kubenswrapper[4783]: I0930 13:54:44.427940 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-qcbqm" podStartSLOduration=7.284927351 podStartE2EDuration="52.42792167s" podCreationTimestamp="2025-09-30 13:53:52 +0000 UTC" firstStartedPulling="2025-09-30 13:53:57.532297895 +0000 UTC m=+1137.463764202" lastFinishedPulling="2025-09-30 13:54:42.675292204 +0000 UTC m=+1182.606758521" observedRunningTime="2025-09-30 13:54:44.426755223 +0000 UTC m=+1184.358221530" watchObservedRunningTime="2025-09-30 13:54:44.42792167 +0000 UTC m=+1184.359387997" Sep 30 13:54:44 crc kubenswrapper[4783]: I0930 13:54:44.853184 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="435b0af3-e4bc-417f-a517-03c12786cee4" path="/var/lib/kubelet/pods/435b0af3-e4bc-417f-a517-03c12786cee4/volumes" Sep 30 13:54:47 crc kubenswrapper[4783]: I0930 13:54:47.314197 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7ff475c6b-qnjjk" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": read tcp 10.217.0.2:55950->10.217.0.155:9311: read: connection reset by peer" Sep 30 13:54:47 crc kubenswrapper[4783]: I0930 13:54:47.314287 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7ff475c6b-qnjjk" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": read tcp 10.217.0.2:55948->10.217.0.155:9311: read: connection reset by peer" Sep 30 13:54:49 crc kubenswrapper[4783]: I0930 13:54:49.497969 4783 generic.go:334] "Generic (PLEG): container finished" podID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerID="ccc560ee8b7452abbe90f3abf8c9d54c86e65c7a9a1531ef75e232716e377ef8" exitCode=0 Sep 30 13:54:49 crc kubenswrapper[4783]: I0930 13:54:49.498028 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7ff475c6b-qnjjk" event={"ID":"a6d61ab4-825a-4fdc-a326-e56f2c72b857","Type":"ContainerDied","Data":"ccc560ee8b7452abbe90f3abf8c9d54c86e65c7a9a1531ef75e232716e377ef8"} Sep 30 13:54:50 crc kubenswrapper[4783]: I0930 13:54:50.513097 4783 generic.go:334] "Generic (PLEG): container finished" podID="8257f606-4b1c-46e2-918e-9ebf1128f6cc" containerID="a292c102416e551d8b6ab46833ffcf17c8dc0518b4802aea9fda0c0279f9dbfa" exitCode=0 Sep 30 13:54:50 crc kubenswrapper[4783]: I0930 13:54:50.513173 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8257f606-4b1c-46e2-918e-9ebf1128f6cc","Type":"ContainerDied","Data":"a292c102416e551d8b6ab46833ffcf17c8dc0518b4802aea9fda0c0279f9dbfa"} Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.424204 4783 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-9d8545ff7-pqd2t"] Sep 30 13:54:55 crc kubenswrapper[4783]: E0930 13:54:55.429649 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="435b0af3-e4bc-417f-a517-03c12786cee4" containerName="dnsmasq-dns" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.429684 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="435b0af3-e4bc-417f-a517-03c12786cee4" containerName="dnsmasq-dns" Sep 30 13:54:55 crc kubenswrapper[4783]: E0930 13:54:55.429707 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="435b0af3-e4bc-417f-a517-03c12786cee4" containerName="init" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.429716 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="435b0af3-e4bc-417f-a517-03c12786cee4" containerName="init" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.432519 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="435b0af3-e4bc-417f-a517-03c12786cee4" containerName="dnsmasq-dns" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.434194 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.437779 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.438780 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.439149 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.454193 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-9d8545ff7-pqd2t"] Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.576951 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-config-data\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.577283 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccf790ec-b4f7-4734-92a0-929ed51c08ec-log-httpd\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.577455 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-internal-tls-certs\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.577565 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-combined-ca-bundle\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc 
kubenswrapper[4783]: I0930 13:54:55.577647 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbxqz\" (UniqueName: \"kubernetes.io/projected/ccf790ec-b4f7-4734-92a0-929ed51c08ec-kube-api-access-dbxqz\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.577698 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-public-tls-certs\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.577823 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ccf790ec-b4f7-4734-92a0-929ed51c08ec-etc-swift\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.577869 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccf790ec-b4f7-4734-92a0-929ed51c08ec-run-httpd\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.680051 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-config-data\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.680171 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccf790ec-b4f7-4734-92a0-929ed51c08ec-log-httpd\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.680210 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-internal-tls-certs\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.680265 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-combined-ca-bundle\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.680293 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbxqz\" (UniqueName: \"kubernetes.io/projected/ccf790ec-b4f7-4734-92a0-929ed51c08ec-kube-api-access-dbxqz\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 
crc kubenswrapper[4783]: I0930 13:54:55.680313 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-public-tls-certs\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.680362 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ccf790ec-b4f7-4734-92a0-929ed51c08ec-etc-swift\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.680385 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccf790ec-b4f7-4734-92a0-929ed51c08ec-run-httpd\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.681050 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccf790ec-b4f7-4734-92a0-929ed51c08ec-run-httpd\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.681525 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccf790ec-b4f7-4734-92a0-929ed51c08ec-log-httpd\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.688878 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-combined-ca-bundle\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.689781 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ccf790ec-b4f7-4734-92a0-929ed51c08ec-etc-swift\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.690129 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-config-data\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.698313 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-internal-tls-certs\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.698802 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-public-tls-certs\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.701841 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbxqz\" (UniqueName: \"kubernetes.io/projected/ccf790ec-b4f7-4734-92a0-929ed51c08ec-kube-api-access-dbxqz\") pod \"swift-proxy-9d8545ff7-pqd2t\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") " pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:55 crc kubenswrapper[4783]: I0930 13:54:55.763111 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.515427 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7ff475c6b-qnjjk" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.515445 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7ff475c6b-qnjjk" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.515790 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.571340 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8257f606-4b1c-46e2-918e-9ebf1128f6cc","Type":"ContainerDied","Data":"9e11ccd72f03979cdbd68b458335dbbd2a0cf5c9797b4572a675ade551bf61f5"} Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.571564 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e11ccd72f03979cdbd68b458335dbbd2a0cf5c9797b4572a675ade551bf61f5" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.573498 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7ff475c6b-qnjjk" event={"ID":"a6d61ab4-825a-4fdc-a326-e56f2c72b857","Type":"ContainerDied","Data":"4b702f7cd1cfe698857c7c843fcc4375ff810fcc54eef3b726b3a94bc91fbd7f"} Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.573643 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b702f7cd1cfe698857c7c843fcc4375ff810fcc54eef3b726b3a94bc91fbd7f" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.670782 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.708795 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:56 crc kubenswrapper[4783]: E0930 13:54:56.767514 4783 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-openstackclient@sha256:80b8547cf5821a4eb5461d1ac14edbc700ef03926268af960bf511647de027af" Sep 30 13:54:56 crc kubenswrapper[4783]: E0930 13:54:56.767724 4783 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstackclient,Image:quay.io/podified-antelope-centos9/openstack-openstackclient@sha256:80b8547cf5821a4eb5461d1ac14edbc700ef03926268af960bf511647de027af,Command:[/bin/sleep],Args:[infinity],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n676h85h56chb5h5f7h54bh5dch646h94h66h56h5fbhcch696hb6h567h5c9h545h665h6fh588hb6h59bh75h67hbbh58bh99h647h66bh664h667q,ValueFrom:nil,},EnvVar{Name:OS_CLOUD,Value:default,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_HOST,Value:metric-storage-prometheus.openstack.svc,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_PORT,Value:9090,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openstack-config,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/cloudrc,SubPath:cloudrc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hlmc5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42401,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42401,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstackclient_openstack(05290e3e-89c9-4073-96b6-e97a289f4431): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 13:54:56 crc kubenswrapper[4783]: E0930 13:54:56.768937 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstackclient" podUID="05290e3e-89c9-4073-96b6-e97a289f4431" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.803686 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/8257f606-4b1c-46e2-918e-9ebf1128f6cc-run-httpd\") pod \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.803745 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-sg-core-conf-yaml\") pod \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.803834 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nk8l\" (UniqueName: \"kubernetes.io/projected/a6d61ab4-825a-4fdc-a326-e56f2c72b857-kube-api-access-4nk8l\") pod \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.803855 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-config-data\") pod \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.803872 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6d61ab4-825a-4fdc-a326-e56f2c72b857-logs\") pod \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.803930 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8257f606-4b1c-46e2-918e-9ebf1128f6cc-log-httpd\") pod \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.803965 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-scripts\") pod \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.804050 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-config-data-custom\") pod \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.804071 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2wr5\" (UniqueName: \"kubernetes.io/projected/8257f606-4b1c-46e2-918e-9ebf1128f6cc-kube-api-access-j2wr5\") pod \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.804108 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-config-data\") pod \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.804129 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-combined-ca-bundle\") pod \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\" (UID: \"a6d61ab4-825a-4fdc-a326-e56f2c72b857\") " Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.804148 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-combined-ca-bundle\") pod \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\" (UID: \"8257f606-4b1c-46e2-918e-9ebf1128f6cc\") " Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.804175 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8257f606-4b1c-46e2-918e-9ebf1128f6cc-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8257f606-4b1c-46e2-918e-9ebf1128f6cc" (UID: "8257f606-4b1c-46e2-918e-9ebf1128f6cc"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.805089 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8257f606-4b1c-46e2-918e-9ebf1128f6cc-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8257f606-4b1c-46e2-918e-9ebf1128f6cc" (UID: "8257f606-4b1c-46e2-918e-9ebf1128f6cc"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.805177 4783 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8257f606-4b1c-46e2-918e-9ebf1128f6cc-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.805202 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6d61ab4-825a-4fdc-a326-e56f2c72b857-logs" (OuterVolumeSpecName: "logs") pod "a6d61ab4-825a-4fdc-a326-e56f2c72b857" (UID: "a6d61ab4-825a-4fdc-a326-e56f2c72b857"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.812461 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8257f606-4b1c-46e2-918e-9ebf1128f6cc" (UID: "8257f606-4b1c-46e2-918e-9ebf1128f6cc"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.812506 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6d61ab4-825a-4fdc-a326-e56f2c72b857-kube-api-access-4nk8l" (OuterVolumeSpecName: "kube-api-access-4nk8l") pod "a6d61ab4-825a-4fdc-a326-e56f2c72b857" (UID: "a6d61ab4-825a-4fdc-a326-e56f2c72b857"). InnerVolumeSpecName "kube-api-access-4nk8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.812627 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-scripts" (OuterVolumeSpecName: "scripts") pod "8257f606-4b1c-46e2-918e-9ebf1128f6cc" (UID: "8257f606-4b1c-46e2-918e-9ebf1128f6cc"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.820871 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8257f606-4b1c-46e2-918e-9ebf1128f6cc-kube-api-access-j2wr5" (OuterVolumeSpecName: "kube-api-access-j2wr5") pod "8257f606-4b1c-46e2-918e-9ebf1128f6cc" (UID: "8257f606-4b1c-46e2-918e-9ebf1128f6cc"). InnerVolumeSpecName "kube-api-access-j2wr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.825180 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a6d61ab4-825a-4fdc-a326-e56f2c72b857" (UID: "a6d61ab4-825a-4fdc-a326-e56f2c72b857"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.864732 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a6d61ab4-825a-4fdc-a326-e56f2c72b857" (UID: "a6d61ab4-825a-4fdc-a326-e56f2c72b857"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.866526 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-config-data" (OuterVolumeSpecName: "config-data") pod "8257f606-4b1c-46e2-918e-9ebf1128f6cc" (UID: "8257f606-4b1c-46e2-918e-9ebf1128f6cc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.871031 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8257f606-4b1c-46e2-918e-9ebf1128f6cc" (UID: "8257f606-4b1c-46e2-918e-9ebf1128f6cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.888435 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-config-data" (OuterVolumeSpecName: "config-data") pod "a6d61ab4-825a-4fdc-a326-e56f2c72b857" (UID: "a6d61ab4-825a-4fdc-a326-e56f2c72b857"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.907058 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.907099 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.907114 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.907127 4783 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.907139 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nk8l\" (UniqueName: \"kubernetes.io/projected/a6d61ab4-825a-4fdc-a326-e56f2c72b857-kube-api-access-4nk8l\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.907150 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.907161 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6d61ab4-825a-4fdc-a326-e56f2c72b857-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.907172 4783 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8257f606-4b1c-46e2-918e-9ebf1128f6cc-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.907182 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8257f606-4b1c-46e2-918e-9ebf1128f6cc-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.907194 4783 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a6d61ab4-825a-4fdc-a326-e56f2c72b857-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:56 crc kubenswrapper[4783]: I0930 13:54:56.907209 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2wr5\" (UniqueName: \"kubernetes.io/projected/8257f606-4b1c-46e2-918e-9ebf1128f6cc-kube-api-access-j2wr5\") on node \"crc\" DevicePath \"\"" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.191049 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-9d8545ff7-pqd2t"] Sep 30 13:54:57 crc kubenswrapper[4783]: W0930 13:54:57.204469 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podccf790ec_b4f7_4734_92a0_929ed51c08ec.slice/crio-b210b69fc0933c8dd94ec666e1cefeb578815c1f37e23b423f4a4668a7f7f6dc WatchSource:0}: Error finding container 
b210b69fc0933c8dd94ec666e1cefeb578815c1f37e23b423f4a4668a7f7f6dc: Status 404 returned error can't find the container with id b210b69fc0933c8dd94ec666e1cefeb578815c1f37e23b423f4a4668a7f7f6dc Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.586925 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-9d8545ff7-pqd2t" event={"ID":"ccf790ec-b4f7-4734-92a0-929ed51c08ec","Type":"ContainerStarted","Data":"b210b69fc0933c8dd94ec666e1cefeb578815c1f37e23b423f4a4668a7f7f6dc"} Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.586996 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7ff475c6b-qnjjk" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.587103 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: E0930 13:54:57.593424 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-openstackclient@sha256:80b8547cf5821a4eb5461d1ac14edbc700ef03926268af960bf511647de027af\\\"\"" pod="openstack/openstackclient" podUID="05290e3e-89c9-4073-96b6-e97a289f4431" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.669355 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7ff475c6b-qnjjk"] Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.679548 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7ff475c6b-qnjjk"] Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.727536 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.735215 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.752188 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:54:57 crc kubenswrapper[4783]: E0930 13:54:57.752688 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api-log" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.752709 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api-log" Sep 30 13:54:57 crc kubenswrapper[4783]: E0930 13:54:57.752728 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8257f606-4b1c-46e2-918e-9ebf1128f6cc" containerName="ceilometer-central-agent" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.752737 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="8257f606-4b1c-46e2-918e-9ebf1128f6cc" containerName="ceilometer-central-agent" Sep 30 13:54:57 crc kubenswrapper[4783]: E0930 13:54:57.752773 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.752781 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api" Sep 30 13:54:57 crc kubenswrapper[4783]: E0930 13:54:57.752796 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8257f606-4b1c-46e2-918e-9ebf1128f6cc" containerName="ceilometer-notification-agent" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 
13:54:57.752804 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="8257f606-4b1c-46e2-918e-9ebf1128f6cc" containerName="ceilometer-notification-agent" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.753022 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.753336 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="8257f606-4b1c-46e2-918e-9ebf1128f6cc" containerName="ceilometer-central-agent" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.753369 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="8257f606-4b1c-46e2-918e-9ebf1128f6cc" containerName="ceilometer-notification-agent" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.753388 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api-log" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.755707 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.758343 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.761147 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.783538 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.825208 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf8bc\" (UniqueName: \"kubernetes.io/projected/65cf96cf-4b72-4cc3-8940-55f1d45c707c-kube-api-access-zf8bc\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.829533 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.829596 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.829675 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65cf96cf-4b72-4cc3-8940-55f1d45c707c-run-httpd\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.829718 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-scripts\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: 
I0930 13:54:57.829803 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65cf96cf-4b72-4cc3-8940-55f1d45c707c-log-httpd\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.829853 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-config-data\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.931268 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.931312 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.931351 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65cf96cf-4b72-4cc3-8940-55f1d45c707c-run-httpd\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.931373 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-scripts\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.931416 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65cf96cf-4b72-4cc3-8940-55f1d45c707c-log-httpd\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.931438 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-config-data\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.931488 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf8bc\" (UniqueName: \"kubernetes.io/projected/65cf96cf-4b72-4cc3-8940-55f1d45c707c-kube-api-access-zf8bc\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.933281 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65cf96cf-4b72-4cc3-8940-55f1d45c707c-run-httpd\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 
13:54:57.933313 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65cf96cf-4b72-4cc3-8940-55f1d45c707c-log-httpd\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.937970 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.938494 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.938993 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-config-data\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.944041 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-scripts\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:57 crc kubenswrapper[4783]: I0930 13:54:57.951262 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf8bc\" (UniqueName: \"kubernetes.io/projected/65cf96cf-4b72-4cc3-8940-55f1d45c707c-kube-api-access-zf8bc\") pod \"ceilometer-0\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " pod="openstack/ceilometer-0" Sep 30 13:54:58 crc kubenswrapper[4783]: I0930 13:54:58.084742 4783 util.go:30] "No sandbox for pod can be found. 
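When a pod is created or recreated, the kubelet first checks the runtime for an existing sandbox, which is what the surrounding "No sandbox for pod can be found. Need to start a new one" messages record; the cAdvisor warnings about a crio-<id> cgroup returning a 404 are its watch briefly racing the brand-new sandbox. A decision-logic sketch (an assumed simplification of the kubelet/CRI interaction; "new-sandbox-id" is a placeholder, not a real container ID):

    package main

    import "fmt"

    type sandbox struct {
        id    string
        ready bool
    }

    // ensureSandbox reuses a ready sandbox when one exists; otherwise it
    // reports that a new one must be started, as in the log messages here.
    func ensureSandbox(pod string, existing []sandbox) string {
        for _, s := range existing {
            if s.ready {
                return s.id
            }
        }
        fmt.Printf("No ready sandbox for pod can be found. Need to start a new one pod=%q\n", pod)
        return "new-sandbox-id" // the runtime would return the real crio-<id>
    }

    func main() {
        fmt.Println("using sandbox", ensureSandbox("openstack/ceilometer-0", nil))
    }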
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:54:58 crc kubenswrapper[4783]: I0930 13:54:58.599842 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-9d8545ff7-pqd2t" event={"ID":"ccf790ec-b4f7-4734-92a0-929ed51c08ec","Type":"ContainerStarted","Data":"bc07330040a034c8f60b202b084c69f15e451000d8ccb782c009041fba8c604d"} Sep 30 13:54:58 crc kubenswrapper[4783]: I0930 13:54:58.600166 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-9d8545ff7-pqd2t" event={"ID":"ccf790ec-b4f7-4734-92a0-929ed51c08ec","Type":"ContainerStarted","Data":"4a6acb631ade5965dc80487c2617529e44bce90e5a8b1f824ba3aef899ae630c"} Sep 30 13:54:58 crc kubenswrapper[4783]: I0930 13:54:58.600256 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:58 crc kubenswrapper[4783]: I0930 13:54:58.600329 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:54:58 crc kubenswrapper[4783]: I0930 13:54:58.632580 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-9d8545ff7-pqd2t" podStartSLOduration=3.632555157 podStartE2EDuration="3.632555157s" podCreationTimestamp="2025-09-30 13:54:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:54:58.627433793 +0000 UTC m=+1198.558900100" watchObservedRunningTime="2025-09-30 13:54:58.632555157 +0000 UTC m=+1198.564021504" Sep 30 13:54:58 crc kubenswrapper[4783]: I0930 13:54:58.689012 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:54:58 crc kubenswrapper[4783]: W0930 13:54:58.716698 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65cf96cf_4b72_4cc3_8940_55f1d45c707c.slice/crio-e1d3b61e90548d75d4c174674e6f9eb5617cedf6042da710c410724d3daf7ebc WatchSource:0}: Error finding container e1d3b61e90548d75d4c174674e6f9eb5617cedf6042da710c410724d3daf7ebc: Status 404 returned error can't find the container with id e1d3b61e90548d75d4c174674e6f9eb5617cedf6042da710c410724d3daf7ebc Sep 30 13:54:58 crc kubenswrapper[4783]: I0930 13:54:58.859736 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8257f606-4b1c-46e2-918e-9ebf1128f6cc" path="/var/lib/kubelet/pods/8257f606-4b1c-46e2-918e-9ebf1128f6cc/volumes" Sep 30 13:54:58 crc kubenswrapper[4783]: I0930 13:54:58.860652 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" path="/var/lib/kubelet/pods/a6d61ab4-825a-4fdc-a326-e56f2c72b857/volumes" Sep 30 13:54:59 crc kubenswrapper[4783]: I0930 13:54:59.617407 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65cf96cf-4b72-4cc3-8940-55f1d45c707c","Type":"ContainerStarted","Data":"e1d3b61e90548d75d4c174674e6f9eb5617cedf6042da710c410724d3daf7ebc"} Sep 30 13:55:01 crc kubenswrapper[4783]: I0930 13:55:01.516726 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7ff475c6b-qnjjk" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 13:55:01 crc kubenswrapper[4783]: I0930 13:55:01.516814 4783 
prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7ff475c6b-qnjjk" podUID="a6d61ab4-825a-4fdc-a326-e56f2c72b857" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 13:55:05 crc kubenswrapper[4783]: I0930 13:55:05.778689 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:55:05 crc kubenswrapper[4783]: I0930 13:55:05.780456 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-9d8545ff7-pqd2t" Sep 30 13:55:07 crc kubenswrapper[4783]: I0930 13:55:07.673773 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:55:07 crc kubenswrapper[4783]: I0930 13:55:07.674286 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:55:07 crc kubenswrapper[4783]: I0930 13:55:07.674373 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:55:07 crc kubenswrapper[4783]: I0930 13:55:07.675504 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a7cb9b97e41dd6ebeb85ae9ef9261c774b500007acfc3bb801dd53ce003a26e8"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 13:55:07 crc kubenswrapper[4783]: I0930 13:55:07.675634 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://a7cb9b97e41dd6ebeb85ae9ef9261c774b500007acfc3bb801dd53ce003a26e8" gracePeriod=600 Sep 30 13:55:08 crc kubenswrapper[4783]: I0930 13:55:08.750434 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="a7cb9b97e41dd6ebeb85ae9ef9261c774b500007acfc3bb801dd53ce003a26e8" exitCode=0 Sep 30 13:55:08 crc kubenswrapper[4783]: I0930 13:55:08.750499 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"a7cb9b97e41dd6ebeb85ae9ef9261c774b500007acfc3bb801dd53ce003a26e8"} Sep 30 13:55:08 crc kubenswrapper[4783]: I0930 13:55:08.750839 4783 scope.go:117] "RemoveContainer" containerID="6af773030061195dc9f5fe7c2469df68133a2624856a154eaac950572277b0cc" Sep 30 13:55:13 crc kubenswrapper[4783]: I0930 13:55:13.464669 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:14 crc kubenswrapper[4783]: I0930 13:55:14.812957 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"65cf96cf-4b72-4cc3-8940-55f1d45c707c","Type":"ContainerStarted","Data":"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e"} Sep 30 13:55:14 crc kubenswrapper[4783]: I0930 13:55:14.815037 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"edaa8451ea5ff38f645e9552be3529f3e61b692d69e710a73e7a302ef19b35cd"} Sep 30 13:55:15 crc kubenswrapper[4783]: I0930 13:55:15.827254 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"05290e3e-89c9-4073-96b6-e97a289f4431","Type":"ContainerStarted","Data":"8499ab2b955764e172baad551be4e5d6185f96063acbf96197574d2eb2b928d4"} Sep 30 13:55:15 crc kubenswrapper[4783]: I0930 13:55:15.831059 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65cf96cf-4b72-4cc3-8940-55f1d45c707c","Type":"ContainerStarted","Data":"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc"} Sep 30 13:55:15 crc kubenswrapper[4783]: I0930 13:55:15.847580 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=4.4013000380000005 podStartE2EDuration="35.847560163s" podCreationTimestamp="2025-09-30 13:54:40 +0000 UTC" firstStartedPulling="2025-09-30 13:54:43.170656153 +0000 UTC m=+1183.102122460" lastFinishedPulling="2025-09-30 13:55:14.616916268 +0000 UTC m=+1214.548382585" observedRunningTime="2025-09-30 13:55:15.846398405 +0000 UTC m=+1215.777864722" watchObservedRunningTime="2025-09-30 13:55:15.847560163 +0000 UTC m=+1215.779026480" Sep 30 13:55:16 crc kubenswrapper[4783]: I0930 13:55:16.840995 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65cf96cf-4b72-4cc3-8940-55f1d45c707c","Type":"ContainerStarted","Data":"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88"} Sep 30 13:55:18 crc kubenswrapper[4783]: I0930 13:55:18.863866 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65cf96cf-4b72-4cc3-8940-55f1d45c707c","Type":"ContainerStarted","Data":"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751"} Sep 30 13:55:18 crc kubenswrapper[4783]: I0930 13:55:18.864358 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 13:55:18 crc kubenswrapper[4783]: I0930 13:55:18.864103 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="ceilometer-notification-agent" containerID="cri-o://098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc" gracePeriod=30 Sep 30 13:55:18 crc kubenswrapper[4783]: I0930 13:55:18.864015 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="ceilometer-central-agent" containerID="cri-o://a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e" gracePeriod=30 Sep 30 13:55:18 crc kubenswrapper[4783]: I0930 13:55:18.864182 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="sg-core" containerID="cri-o://a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88" gracePeriod=30 Sep 30 13:55:18 crc 
kubenswrapper[4783]: I0930 13:55:18.864162 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="proxy-httpd" containerID="cri-o://438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751" gracePeriod=30 Sep 30 13:55:18 crc kubenswrapper[4783]: I0930 13:55:18.889731 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.714057603 podStartE2EDuration="21.8897173s" podCreationTimestamp="2025-09-30 13:54:57 +0000 UTC" firstStartedPulling="2025-09-30 13:54:58.720828975 +0000 UTC m=+1198.652295322" lastFinishedPulling="2025-09-30 13:55:17.896488712 +0000 UTC m=+1217.827955019" observedRunningTime="2025-09-30 13:55:18.887635324 +0000 UTC m=+1218.819101641" watchObservedRunningTime="2025-09-30 13:55:18.8897173 +0000 UTC m=+1218.821183607" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.670250 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.766844 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-sg-core-conf-yaml\") pod \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.766940 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65cf96cf-4b72-4cc3-8940-55f1d45c707c-log-httpd\") pod \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.766976 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf8bc\" (UniqueName: \"kubernetes.io/projected/65cf96cf-4b72-4cc3-8940-55f1d45c707c-kube-api-access-zf8bc\") pod \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.767028 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65cf96cf-4b72-4cc3-8940-55f1d45c707c-run-httpd\") pod \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.767117 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-config-data\") pod \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.767245 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-combined-ca-bundle\") pod \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\" (UID: \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.767291 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-scripts\") pod \"65cf96cf-4b72-4cc3-8940-55f1d45c707c\" (UID: 
\"65cf96cf-4b72-4cc3-8940-55f1d45c707c\") " Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.767699 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65cf96cf-4b72-4cc3-8940-55f1d45c707c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "65cf96cf-4b72-4cc3-8940-55f1d45c707c" (UID: "65cf96cf-4b72-4cc3-8940-55f1d45c707c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.767850 4783 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65cf96cf-4b72-4cc3-8940-55f1d45c707c-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.767978 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65cf96cf-4b72-4cc3-8940-55f1d45c707c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "65cf96cf-4b72-4cc3-8940-55f1d45c707c" (UID: "65cf96cf-4b72-4cc3-8940-55f1d45c707c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.772859 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-scripts" (OuterVolumeSpecName: "scripts") pod "65cf96cf-4b72-4cc3-8940-55f1d45c707c" (UID: "65cf96cf-4b72-4cc3-8940-55f1d45c707c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.773543 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65cf96cf-4b72-4cc3-8940-55f1d45c707c-kube-api-access-zf8bc" (OuterVolumeSpecName: "kube-api-access-zf8bc") pod "65cf96cf-4b72-4cc3-8940-55f1d45c707c" (UID: "65cf96cf-4b72-4cc3-8940-55f1d45c707c"). InnerVolumeSpecName "kube-api-access-zf8bc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.801449 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "65cf96cf-4b72-4cc3-8940-55f1d45c707c" (UID: "65cf96cf-4b72-4cc3-8940-55f1d45c707c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.849663 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65cf96cf-4b72-4cc3-8940-55f1d45c707c" (UID: "65cf96cf-4b72-4cc3-8940-55f1d45c707c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.864452 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-config-data" (OuterVolumeSpecName: "config-data") pod "65cf96cf-4b72-4cc3-8940-55f1d45c707c" (UID: "65cf96cf-4b72-4cc3-8940-55f1d45c707c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.869205 4783 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.869248 4783 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65cf96cf-4b72-4cc3-8940-55f1d45c707c-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.869259 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf8bc\" (UniqueName: \"kubernetes.io/projected/65cf96cf-4b72-4cc3-8940-55f1d45c707c-kube-api-access-zf8bc\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.869269 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.869278 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.869286 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65cf96cf-4b72-4cc3-8940-55f1d45c707c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.874541 4783 generic.go:334] "Generic (PLEG): container finished" podID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerID="438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751" exitCode=0 Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.874574 4783 generic.go:334] "Generic (PLEG): container finished" podID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerID="a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88" exitCode=2 Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.874584 4783 generic.go:334] "Generic (PLEG): container finished" podID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerID="098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc" exitCode=0 Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.874593 4783 generic.go:334] "Generic (PLEG): container finished" podID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerID="a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e" exitCode=0 Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.874644 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65cf96cf-4b72-4cc3-8940-55f1d45c707c","Type":"ContainerDied","Data":"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751"} Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.874678 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65cf96cf-4b72-4cc3-8940-55f1d45c707c","Type":"ContainerDied","Data":"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88"} Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.874694 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"65cf96cf-4b72-4cc3-8940-55f1d45c707c","Type":"ContainerDied","Data":"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc"} Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.874707 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65cf96cf-4b72-4cc3-8940-55f1d45c707c","Type":"ContainerDied","Data":"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e"} Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.874719 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65cf96cf-4b72-4cc3-8940-55f1d45c707c","Type":"ContainerDied","Data":"e1d3b61e90548d75d4c174674e6f9eb5617cedf6042da710c410724d3daf7ebc"} Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.874738 4783 scope.go:117] "RemoveContainer" containerID="438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.874898 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.877960 4783 generic.go:334] "Generic (PLEG): container finished" podID="04087483-d2dd-4f70-99f1-592a46394263" containerID="1022d5e424762d7e29acf4475bea4e13229a028b0e4feddd4ba6cfac7675e760" exitCode=0 Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.878090 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qcbqm" event={"ID":"04087483-d2dd-4f70-99f1-592a46394263","Type":"ContainerDied","Data":"1022d5e424762d7e29acf4475bea4e13229a028b0e4feddd4ba6cfac7675e760"} Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.911695 4783 scope.go:117] "RemoveContainer" containerID="a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.919535 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.926457 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.941169 4783 scope.go:117] "RemoveContainer" containerID="098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.955662 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:19 crc kubenswrapper[4783]: E0930 13:55:19.961051 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="proxy-httpd" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.961173 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="proxy-httpd" Sep 30 13:55:19 crc kubenswrapper[4783]: E0930 13:55:19.961271 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="ceilometer-central-agent" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.961323 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="ceilometer-central-agent" Sep 30 13:55:19 crc kubenswrapper[4783]: E0930 13:55:19.961373 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="sg-core" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.961446 4783 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="sg-core" Sep 30 13:55:19 crc kubenswrapper[4783]: E0930 13:55:19.961503 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="ceilometer-notification-agent" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.961561 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="ceilometer-notification-agent" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.961803 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="ceilometer-central-agent" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.961863 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="proxy-httpd" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.961916 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="ceilometer-notification-agent" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.961971 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" containerName="sg-core" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.963504 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.967567 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.967886 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.972833 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:19 crc kubenswrapper[4783]: I0930 13:55:19.977240 4783 scope.go:117] "RemoveContainer" containerID="a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.060217 4783 scope.go:117] "RemoveContainer" containerID="438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751" Sep 30 13:55:20 crc kubenswrapper[4783]: E0930 13:55:20.061106 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751\": container with ID starting with 438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751 not found: ID does not exist" containerID="438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.061159 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751"} err="failed to get container status \"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751\": rpc error: code = NotFound desc = could not find container \"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751\": container with ID starting with 438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751 not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.061194 4783 scope.go:117] "RemoveContainer" 
containerID="a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88" Sep 30 13:55:20 crc kubenswrapper[4783]: E0930 13:55:20.061792 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88\": container with ID starting with a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88 not found: ID does not exist" containerID="a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.061831 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88"} err="failed to get container status \"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88\": rpc error: code = NotFound desc = could not find container \"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88\": container with ID starting with a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88 not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.061858 4783 scope.go:117] "RemoveContainer" containerID="098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc" Sep 30 13:55:20 crc kubenswrapper[4783]: E0930 13:55:20.069371 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc\": container with ID starting with 098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc not found: ID does not exist" containerID="098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.069399 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc"} err="failed to get container status \"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc\": rpc error: code = NotFound desc = could not find container \"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc\": container with ID starting with 098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.069417 4783 scope.go:117] "RemoveContainer" containerID="a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e" Sep 30 13:55:20 crc kubenswrapper[4783]: E0930 13:55:20.069781 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e\": container with ID starting with a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e not found: ID does not exist" containerID="a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.069869 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e"} err="failed to get container status \"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e\": rpc error: code = NotFound desc = could not find container \"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e\": container with ID starting with 
a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.069940 4783 scope.go:117] "RemoveContainer" containerID="438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.070335 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751"} err="failed to get container status \"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751\": rpc error: code = NotFound desc = could not find container \"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751\": container with ID starting with 438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751 not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.070357 4783 scope.go:117] "RemoveContainer" containerID="a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.070573 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88"} err="failed to get container status \"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88\": rpc error: code = NotFound desc = could not find container \"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88\": container with ID starting with a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88 not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.070594 4783 scope.go:117] "RemoveContainer" containerID="098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.073745 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc"} err="failed to get container status \"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc\": rpc error: code = NotFound desc = could not find container \"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc\": container with ID starting with 098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.073767 4783 scope.go:117] "RemoveContainer" containerID="a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.074648 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.074746 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-config-data\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.074854 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwv2w\" (UniqueName: 
\"kubernetes.io/projected/591731a6-650f-464f-90dd-8d5822adc81a-kube-api-access-xwv2w\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.074932 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.074755 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e"} err="failed to get container status \"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e\": rpc error: code = NotFound desc = could not find container \"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e\": container with ID starting with a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.075014 4783 scope.go:117] "RemoveContainer" containerID="438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.075174 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-scripts\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.075284 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/591731a6-650f-464f-90dd-8d5822adc81a-run-httpd\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.075371 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/591731a6-650f-464f-90dd-8d5822adc81a-log-httpd\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.075300 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751"} err="failed to get container status \"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751\": rpc error: code = NotFound desc = could not find container \"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751\": container with ID starting with 438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751 not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.075576 4783 scope.go:117] "RemoveContainer" containerID="a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.076354 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88"} err="failed to get container status \"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88\": rpc error: code 
= NotFound desc = could not find container \"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88\": container with ID starting with a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88 not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.076383 4783 scope.go:117] "RemoveContainer" containerID="098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.076627 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc"} err="failed to get container status \"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc\": rpc error: code = NotFound desc = could not find container \"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc\": container with ID starting with 098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.076700 4783 scope.go:117] "RemoveContainer" containerID="a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.076983 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e"} err="failed to get container status \"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e\": rpc error: code = NotFound desc = could not find container \"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e\": container with ID starting with a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.077004 4783 scope.go:117] "RemoveContainer" containerID="438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.077635 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751"} err="failed to get container status \"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751\": rpc error: code = NotFound desc = could not find container \"438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751\": container with ID starting with 438d5d5f8fec04768c319775fb057e3ed6c39cb196983e8457ea6d8e6ac03751 not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.077657 4783 scope.go:117] "RemoveContainer" containerID="a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.077899 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88"} err="failed to get container status \"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88\": rpc error: code = NotFound desc = could not find container \"a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88\": container with ID starting with a68dc26a23ec0c8b1415ec6e9e97446f00846852677cc45c9845ddf236cffe88 not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.077970 4783 scope.go:117] "RemoveContainer" containerID="098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 
13:55:20.078208 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc"} err="failed to get container status \"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc\": rpc error: code = NotFound desc = could not find container \"098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc\": container with ID starting with 098a4647c1c65d28139a4aa3d4882b12ac1de0037b054db3638e103ffcf6eedc not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.078308 4783 scope.go:117] "RemoveContainer" containerID="a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.078630 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e"} err="failed to get container status \"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e\": rpc error: code = NotFound desc = could not find container \"a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e\": container with ID starting with a2937a42aa9005a99c3be8115e8821f862436e68ec58dd30e5c9b6b035d7dd2e not found: ID does not exist" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.176673 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwv2w\" (UniqueName: \"kubernetes.io/projected/591731a6-650f-464f-90dd-8d5822adc81a-kube-api-access-xwv2w\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.176967 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.178125 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-scripts\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.178244 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/591731a6-650f-464f-90dd-8d5822adc81a-run-httpd\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.178371 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/591731a6-650f-464f-90dd-8d5822adc81a-log-httpd\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.178483 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.178555 4783 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-config-data\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.178694 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/591731a6-650f-464f-90dd-8d5822adc81a-run-httpd\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.179263 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/591731a6-650f-464f-90dd-8d5822adc81a-log-httpd\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.181999 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-scripts\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.182069 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-config-data\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.182364 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.189472 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.197539 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwv2w\" (UniqueName: \"kubernetes.io/projected/591731a6-650f-464f-90dd-8d5822adc81a-kube-api-access-xwv2w\") pod \"ceilometer-0\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.345608 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.807935 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:20 crc kubenswrapper[4783]: W0930 13:55:20.814369 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod591731a6_650f_464f_90dd_8d5822adc81a.slice/crio-3ad822ad6e59e6cfbd45e84fbe667202fcc1ab259fc6d425ce857a05639b8e9f WatchSource:0}: Error finding container 3ad822ad6e59e6cfbd45e84fbe667202fcc1ab259fc6d425ce857a05639b8e9f: Status 404 returned error can't find the container with id 3ad822ad6e59e6cfbd45e84fbe667202fcc1ab259fc6d425ce857a05639b8e9f Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.854841 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65cf96cf-4b72-4cc3-8940-55f1d45c707c" path="/var/lib/kubelet/pods/65cf96cf-4b72-4cc3-8940-55f1d45c707c/volumes" Sep 30 13:55:20 crc kubenswrapper[4783]: I0930 13:55:20.893650 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"591731a6-650f-464f-90dd-8d5822adc81a","Type":"ContainerStarted","Data":"3ad822ad6e59e6cfbd45e84fbe667202fcc1ab259fc6d425ce857a05639b8e9f"} Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.263187 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.399247 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmkfs\" (UniqueName: \"kubernetes.io/projected/04087483-d2dd-4f70-99f1-592a46394263-kube-api-access-cmkfs\") pod \"04087483-d2dd-4f70-99f1-592a46394263\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.399339 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-db-sync-config-data\") pod \"04087483-d2dd-4f70-99f1-592a46394263\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.399375 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04087483-d2dd-4f70-99f1-592a46394263-etc-machine-id\") pod \"04087483-d2dd-4f70-99f1-592a46394263\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.399433 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-scripts\") pod \"04087483-d2dd-4f70-99f1-592a46394263\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.399509 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-combined-ca-bundle\") pod \"04087483-d2dd-4f70-99f1-592a46394263\" (UID: \"04087483-d2dd-4f70-99f1-592a46394263\") " Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.399599 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-config-data\") pod \"04087483-d2dd-4f70-99f1-592a46394263\" (UID: 
\"04087483-d2dd-4f70-99f1-592a46394263\") " Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.400204 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/04087483-d2dd-4f70-99f1-592a46394263-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "04087483-d2dd-4f70-99f1-592a46394263" (UID: "04087483-d2dd-4f70-99f1-592a46394263"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.413553 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-scripts" (OuterVolumeSpecName: "scripts") pod "04087483-d2dd-4f70-99f1-592a46394263" (UID: "04087483-d2dd-4f70-99f1-592a46394263"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.420424 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04087483-d2dd-4f70-99f1-592a46394263-kube-api-access-cmkfs" (OuterVolumeSpecName: "kube-api-access-cmkfs") pod "04087483-d2dd-4f70-99f1-592a46394263" (UID: "04087483-d2dd-4f70-99f1-592a46394263"). InnerVolumeSpecName "kube-api-access-cmkfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.428426 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "04087483-d2dd-4f70-99f1-592a46394263" (UID: "04087483-d2dd-4f70-99f1-592a46394263"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.467782 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04087483-d2dd-4f70-99f1-592a46394263" (UID: "04087483-d2dd-4f70-99f1-592a46394263"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.487350 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-config-data" (OuterVolumeSpecName: "config-data") pod "04087483-d2dd-4f70-99f1-592a46394263" (UID: "04087483-d2dd-4f70-99f1-592a46394263"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.501652 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.501691 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.501701 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmkfs\" (UniqueName: \"kubernetes.io/projected/04087483-d2dd-4f70-99f1-592a46394263-kube-api-access-cmkfs\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.501712 4783 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.501721 4783 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04087483-d2dd-4f70-99f1-592a46394263-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.501728 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04087483-d2dd-4f70-99f1-592a46394263-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.905643 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qcbqm" event={"ID":"04087483-d2dd-4f70-99f1-592a46394263","Type":"ContainerDied","Data":"a34228bb0d29433f66a191cca2c583f5b0ad262e498c53266566416b15fddd64"} Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.905681 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a34228bb0d29433f66a191cca2c583f5b0ad262e498c53266566416b15fddd64" Sep 30 13:55:21 crc kubenswrapper[4783]: I0930 13:55:21.905703 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-qcbqm" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.211992 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59449b9989-xxcqn"] Sep 30 13:55:22 crc kubenswrapper[4783]: E0930 13:55:22.212635 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04087483-d2dd-4f70-99f1-592a46394263" containerName="cinder-db-sync" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.212652 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="04087483-d2dd-4f70-99f1-592a46394263" containerName="cinder-db-sync" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.212804 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="04087483-d2dd-4f70-99f1-592a46394263" containerName="cinder-db-sync" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.213653 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.225685 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59449b9989-xxcqn"] Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.288190 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.301623 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.304401 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-78bp2" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.306327 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.306517 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.307569 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.328563 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.333202 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7db7\" (UniqueName: \"kubernetes.io/projected/292c2a06-9751-458a-af6b-c2f5687e368e-kube-api-access-g7db7\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.333296 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-dns-svc\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.333360 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-dns-swift-storage-0\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.333430 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-ovsdbserver-nb\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.333501 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-ovsdbserver-sb\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.333542 4783 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-config\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.398175 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.402734 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.405551 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.407058 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.450267 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c8f9900-6e27-42d2-900f-9da5172a3d55-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.450328 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v76gv\" (UniqueName: \"kubernetes.io/projected/6c8f9900-6e27-42d2-900f-9da5172a3d55-kube-api-access-v76gv\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.450445 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7db7\" (UniqueName: \"kubernetes.io/projected/292c2a06-9751-458a-af6b-c2f5687e368e-kube-api-access-g7db7\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.450533 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-dns-svc\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.450611 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.450634 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-dns-swift-storage-0\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.450695 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-scripts\") pod 
\"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.450836 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-ovsdbserver-nb\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.450887 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-config-data\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.450967 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.451001 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-ovsdbserver-sb\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.451051 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-config\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.451779 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-dns-svc\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.451802 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-dns-swift-storage-0\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.451926 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-config\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.452058 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-ovsdbserver-nb\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc 
kubenswrapper[4783]: I0930 13:55:22.452160 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-ovsdbserver-sb\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.468043 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7db7\" (UniqueName: \"kubernetes.io/projected/292c2a06-9751-458a-af6b-c2f5687e368e-kube-api-access-g7db7\") pod \"dnsmasq-dns-59449b9989-xxcqn\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.540585 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.552517 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-scripts\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.552564 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.552592 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/33c92e45-099f-4186-8cf0-ee8ef6705c9f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.552640 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkcww\" (UniqueName: \"kubernetes.io/projected/33c92e45-099f-4186-8cf0-ee8ef6705c9f-kube-api-access-pkcww\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.552671 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-config-data-custom\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.552691 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-config-data\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.552742 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33c92e45-099f-4186-8cf0-ee8ef6705c9f-logs\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 
13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.552837 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c8f9900-6e27-42d2-900f-9da5172a3d55-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.552865 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v76gv\" (UniqueName: \"kubernetes.io/projected/6c8f9900-6e27-42d2-900f-9da5172a3d55-kube-api-access-v76gv\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.552889 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.552924 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.552956 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-scripts\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.552979 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-config-data\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.552993 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c8f9900-6e27-42d2-900f-9da5172a3d55-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.556861 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.560973 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.563030 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-config-data\") pod 
\"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.567814 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-scripts\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.573357 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v76gv\" (UniqueName: \"kubernetes.io/projected/6c8f9900-6e27-42d2-900f-9da5172a3d55-kube-api-access-v76gv\") pod \"cinder-scheduler-0\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.632269 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.655074 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-scripts\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.655358 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/33c92e45-099f-4186-8cf0-ee8ef6705c9f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.655405 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkcww\" (UniqueName: \"kubernetes.io/projected/33c92e45-099f-4186-8cf0-ee8ef6705c9f-kube-api-access-pkcww\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.655425 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-config-data-custom\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.655443 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-config-data\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.655465 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33c92e45-099f-4186-8cf0-ee8ef6705c9f-logs\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.655519 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.658959 4783 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.659020 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/33c92e45-099f-4186-8cf0-ee8ef6705c9f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.659624 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-config-data-custom\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.663486 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33c92e45-099f-4186-8cf0-ee8ef6705c9f-logs\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.675331 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkcww\" (UniqueName: \"kubernetes.io/projected/33c92e45-099f-4186-8cf0-ee8ef6705c9f-kube-api-access-pkcww\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.680519 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-config-data\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.685356 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-scripts\") pod \"cinder-api-0\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " pod="openstack/cinder-api-0" Sep 30 13:55:22 crc kubenswrapper[4783]: I0930 13:55:22.726083 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 13:55:23 crc kubenswrapper[4783]: I0930 13:55:23.005204 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59449b9989-xxcqn"] Sep 30 13:55:23 crc kubenswrapper[4783]: I0930 13:55:23.271513 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 13:55:23 crc kubenswrapper[4783]: I0930 13:55:23.358454 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 13:55:23 crc kubenswrapper[4783]: W0930 13:55:23.361448 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33c92e45_099f_4186_8cf0_ee8ef6705c9f.slice/crio-51539389368dc6e973a0d667c06cfcce3ec1e5232a0f9eaebb5afd31bae1c148 WatchSource:0}: Error finding container 51539389368dc6e973a0d667c06cfcce3ec1e5232a0f9eaebb5afd31bae1c148: Status 404 returned error can't find the container with id 51539389368dc6e973a0d667c06cfcce3ec1e5232a0f9eaebb5afd31bae1c148 Sep 30 13:55:23 crc kubenswrapper[4783]: I0930 13:55:23.958376 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"591731a6-650f-464f-90dd-8d5822adc81a","Type":"ContainerStarted","Data":"4a2c40cff1f58fe992964b368cf0962b122ef6dff3a7eba7225b567a6d72f2bc"} Sep 30 13:55:23 crc kubenswrapper[4783]: I0930 13:55:23.961837 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"33c92e45-099f-4186-8cf0-ee8ef6705c9f","Type":"ContainerStarted","Data":"51539389368dc6e973a0d667c06cfcce3ec1e5232a0f9eaebb5afd31bae1c148"} Sep 30 13:55:23 crc kubenswrapper[4783]: I0930 13:55:23.964381 4783 generic.go:334] "Generic (PLEG): container finished" podID="292c2a06-9751-458a-af6b-c2f5687e368e" containerID="af164cdab79986ad1ef58f3128b3351843cfa6f2b2741c1169051422073c65b0" exitCode=0 Sep 30 13:55:23 crc kubenswrapper[4783]: I0930 13:55:23.964496 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59449b9989-xxcqn" event={"ID":"292c2a06-9751-458a-af6b-c2f5687e368e","Type":"ContainerDied","Data":"af164cdab79986ad1ef58f3128b3351843cfa6f2b2741c1169051422073c65b0"} Sep 30 13:55:23 crc kubenswrapper[4783]: I0930 13:55:23.964523 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59449b9989-xxcqn" event={"ID":"292c2a06-9751-458a-af6b-c2f5687e368e","Type":"ContainerStarted","Data":"69cab4cdbd813dae901c5ad653f0b8036165f14fc9cd26d23c6ce95b9679c960"} Sep 30 13:55:23 crc kubenswrapper[4783]: I0930 13:55:23.973322 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c8f9900-6e27-42d2-900f-9da5172a3d55","Type":"ContainerStarted","Data":"916893ab254baac2ced51cfc7eb360cfdd80f8d3fae1ed7ec1ff9565d8ff18de"} Sep 30 13:55:24 crc kubenswrapper[4783]: I0930 13:55:24.633728 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 13:55:25 crc kubenswrapper[4783]: I0930 13:55:25.014648 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"591731a6-650f-464f-90dd-8d5822adc81a","Type":"ContainerStarted","Data":"9a58d0f5c774b27da3f59a0e5da9ee410edbe6a0bb2e0e3fcd75600bbedb4c9c"} Sep 30 13:55:25 crc kubenswrapper[4783]: I0930 13:55:25.016856 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"33c92e45-099f-4186-8cf0-ee8ef6705c9f","Type":"ContainerStarted","Data":"6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e"} Sep 30 13:55:25 crc kubenswrapper[4783]: I0930 13:55:25.019480 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59449b9989-xxcqn" event={"ID":"292c2a06-9751-458a-af6b-c2f5687e368e","Type":"ContainerStarted","Data":"aecad23e6b4dc0aa0acf782c74538e26d0f373894066c923de5834591c08ef04"} Sep 30 13:55:25 crc kubenswrapper[4783]: I0930 13:55:25.019638 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:26 crc kubenswrapper[4783]: I0930 13:55:26.028428 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"33c92e45-099f-4186-8cf0-ee8ef6705c9f","Type":"ContainerStarted","Data":"9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b"} Sep 30 13:55:26 crc kubenswrapper[4783]: I0930 13:55:26.028976 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 13:55:26 crc kubenswrapper[4783]: I0930 13:55:26.028586 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="33c92e45-099f-4186-8cf0-ee8ef6705c9f" containerName="cinder-api" containerID="cri-o://9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b" gracePeriod=30 Sep 30 13:55:26 crc kubenswrapper[4783]: I0930 13:55:26.028488 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="33c92e45-099f-4186-8cf0-ee8ef6705c9f" containerName="cinder-api-log" containerID="cri-o://6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e" gracePeriod=30 Sep 30 13:55:26 crc kubenswrapper[4783]: I0930 13:55:26.032052 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c8f9900-6e27-42d2-900f-9da5172a3d55","Type":"ContainerStarted","Data":"8c9a4a0ee16ca03e151273d85760fd14d45ade98826514c5f6b0b4f5dde16e62"} Sep 30 13:55:26 crc kubenswrapper[4783]: I0930 13:55:26.037518 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"591731a6-650f-464f-90dd-8d5822adc81a","Type":"ContainerStarted","Data":"0a4fc9533afb30bcbf59dab810c84fd8bfb56443095e7120b1f21feebb690418"} Sep 30 13:55:26 crc kubenswrapper[4783]: I0930 13:55:26.051049 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59449b9989-xxcqn" podStartSLOduration=4.051035458 podStartE2EDuration="4.051035458s" podCreationTimestamp="2025-09-30 13:55:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:55:25.042366466 +0000 UTC m=+1224.973832773" watchObservedRunningTime="2025-09-30 13:55:26.051035458 +0000 UTC m=+1225.982501765" Sep 30 13:55:26 crc kubenswrapper[4783]: I0930 13:55:26.052469 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.052463835 podStartE2EDuration="4.052463835s" podCreationTimestamp="2025-09-30 13:55:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:55:26.049595123 +0000 UTC m=+1225.981061430" watchObservedRunningTime="2025-09-30 13:55:26.052463835 +0000 UTC m=+1225.983930142" Sep 30 13:55:26 crc 
kubenswrapper[4783]: I0930 13:55:26.778554 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:55:26 crc kubenswrapper[4783]: I0930 13:55:26.779125 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="be403e4a-ed71-41b9-9c17-16a913ecbd8e" containerName="glance-log" containerID="cri-o://fb0c68da9baeada510a8e07bf009f63dec05e3b0fd0160cf0f2cd4356e1bafbf" gracePeriod=30 Sep 30 13:55:26 crc kubenswrapper[4783]: I0930 13:55:26.779300 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="be403e4a-ed71-41b9-9c17-16a913ecbd8e" containerName="glance-httpd" containerID="cri-o://c4c2d396cb8c402c092c9d624b22bee0110dc4673e91dc69423eba5248d63bde" gracePeriod=30 Sep 30 13:55:26 crc kubenswrapper[4783]: I0930 13:55:26.950411 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.042295 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/33c92e45-099f-4186-8cf0-ee8ef6705c9f-etc-machine-id\") pod \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.042682 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkcww\" (UniqueName: \"kubernetes.io/projected/33c92e45-099f-4186-8cf0-ee8ef6705c9f-kube-api-access-pkcww\") pod \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.042763 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-config-data\") pod \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.042791 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33c92e45-099f-4186-8cf0-ee8ef6705c9f-logs\") pod \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.042825 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-config-data-custom\") pod \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.042893 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-combined-ca-bundle\") pod \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.042917 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-scripts\") pod \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\" (UID: \"33c92e45-099f-4186-8cf0-ee8ef6705c9f\") " Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 
13:55:27.044308 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/33c92e45-099f-4186-8cf0-ee8ef6705c9f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "33c92e45-099f-4186-8cf0-ee8ef6705c9f" (UID: "33c92e45-099f-4186-8cf0-ee8ef6705c9f"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.044734 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33c92e45-099f-4186-8cf0-ee8ef6705c9f-logs" (OuterVolumeSpecName: "logs") pod "33c92e45-099f-4186-8cf0-ee8ef6705c9f" (UID: "33c92e45-099f-4186-8cf0-ee8ef6705c9f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.049790 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-scripts" (OuterVolumeSpecName: "scripts") pod "33c92e45-099f-4186-8cf0-ee8ef6705c9f" (UID: "33c92e45-099f-4186-8cf0-ee8ef6705c9f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.052448 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "33c92e45-099f-4186-8cf0-ee8ef6705c9f" (UID: "33c92e45-099f-4186-8cf0-ee8ef6705c9f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.054439 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33c92e45-099f-4186-8cf0-ee8ef6705c9f-kube-api-access-pkcww" (OuterVolumeSpecName: "kube-api-access-pkcww") pod "33c92e45-099f-4186-8cf0-ee8ef6705c9f" (UID: "33c92e45-099f-4186-8cf0-ee8ef6705c9f"). InnerVolumeSpecName "kube-api-access-pkcww". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.055015 4783 generic.go:334] "Generic (PLEG): container finished" podID="33c92e45-099f-4186-8cf0-ee8ef6705c9f" containerID="9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b" exitCode=0 Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.055055 4783 generic.go:334] "Generic (PLEG): container finished" podID="33c92e45-099f-4186-8cf0-ee8ef6705c9f" containerID="6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e" exitCode=143 Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.055109 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"33c92e45-099f-4186-8cf0-ee8ef6705c9f","Type":"ContainerDied","Data":"9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b"} Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.055142 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"33c92e45-099f-4186-8cf0-ee8ef6705c9f","Type":"ContainerDied","Data":"6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e"} Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.055155 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"33c92e45-099f-4186-8cf0-ee8ef6705c9f","Type":"ContainerDied","Data":"51539389368dc6e973a0d667c06cfcce3ec1e5232a0f9eaebb5afd31bae1c148"} Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.055174 4783 scope.go:117] "RemoveContainer" containerID="9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.055347 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.068435 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c8f9900-6e27-42d2-900f-9da5172a3d55","Type":"ContainerStarted","Data":"4d2857562917695568168339aec1a94456cfb765677b9a844a01b629971e78b2"} Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.078852 4783 generic.go:334] "Generic (PLEG): container finished" podID="be403e4a-ed71-41b9-9c17-16a913ecbd8e" containerID="fb0c68da9baeada510a8e07bf009f63dec05e3b0fd0160cf0f2cd4356e1bafbf" exitCode=143 Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.078897 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be403e4a-ed71-41b9-9c17-16a913ecbd8e","Type":"ContainerDied","Data":"fb0c68da9baeada510a8e07bf009f63dec05e3b0fd0160cf0f2cd4356e1bafbf"} Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.099092 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.987680249 podStartE2EDuration="5.099068453s" podCreationTimestamp="2025-09-30 13:55:22 +0000 UTC" firstStartedPulling="2025-09-30 13:55:23.419036241 +0000 UTC m=+1223.350502588" lastFinishedPulling="2025-09-30 13:55:24.530424475 +0000 UTC m=+1224.461890792" observedRunningTime="2025-09-30 13:55:27.090763337 +0000 UTC m=+1227.022229654" watchObservedRunningTime="2025-09-30 13:55:27.099068453 +0000 UTC m=+1227.030534760" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.099394 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-combined-ca-bundle" (OuterVolumeSpecName: 
"combined-ca-bundle") pod "33c92e45-099f-4186-8cf0-ee8ef6705c9f" (UID: "33c92e45-099f-4186-8cf0-ee8ef6705c9f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.116438 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-config-data" (OuterVolumeSpecName: "config-data") pod "33c92e45-099f-4186-8cf0-ee8ef6705c9f" (UID: "33c92e45-099f-4186-8cf0-ee8ef6705c9f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.146608 4783 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/33c92e45-099f-4186-8cf0-ee8ef6705c9f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.146640 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkcww\" (UniqueName: \"kubernetes.io/projected/33c92e45-099f-4186-8cf0-ee8ef6705c9f-kube-api-access-pkcww\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.146653 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.146662 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33c92e45-099f-4186-8cf0-ee8ef6705c9f-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.146670 4783 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.146678 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.146686 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33c92e45-099f-4186-8cf0-ee8ef6705c9f-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.226833 4783 scope.go:117] "RemoveContainer" containerID="6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.264738 4783 scope.go:117] "RemoveContainer" containerID="9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b" Sep 30 13:55:27 crc kubenswrapper[4783]: E0930 13:55:27.265251 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b\": container with ID starting with 9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b not found: ID does not exist" containerID="9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.265299 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b"} err="failed 
to get container status \"9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b\": rpc error: code = NotFound desc = could not find container \"9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b\": container with ID starting with 9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b not found: ID does not exist" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.265327 4783 scope.go:117] "RemoveContainer" containerID="6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e" Sep 30 13:55:27 crc kubenswrapper[4783]: E0930 13:55:27.265554 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e\": container with ID starting with 6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e not found: ID does not exist" containerID="6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.265583 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e"} err="failed to get container status \"6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e\": rpc error: code = NotFound desc = could not find container \"6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e\": container with ID starting with 6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e not found: ID does not exist" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.265604 4783 scope.go:117] "RemoveContainer" containerID="9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.265790 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b"} err="failed to get container status \"9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b\": rpc error: code = NotFound desc = could not find container \"9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b\": container with ID starting with 9834812903b2a8bd2d3c862b876d35bebf2d162f56be6e55614a97bfbfca3b3b not found: ID does not exist" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.265813 4783 scope.go:117] "RemoveContainer" containerID="6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.266023 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e"} err="failed to get container status \"6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e\": rpc error: code = NotFound desc = could not find container \"6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e\": container with ID starting with 6bc8cbabe1ed5ad5052a0ad413e5450dd4fe561044ab098fe14aebed88bd150e not found: ID does not exist" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.391459 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.398776 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.422458 4783 kubelet.go:2421] "SyncLoop ADD" source="api" 
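
The RemoveContainer / ContainerStatus sequence above is the kubelet's cleanup racing with a pod that was already torn down: the runtime answers NotFound for both container IDs, the error is logged at info level, and the kubelet moves on, because a missing container is a successfully deleted container. A short Go sketch of that idempotent-delete pattern against a gRPC runtime; removeIfPresent and the remove callback are hypothetical stand-ins for a real CRI RemoveContainer call.

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeIfPresent treats gRPC NotFound as success: the container is
// already gone, so there is nothing left to delete.
func removeIfPresent(remove func(id string) error, id string) error {
	err := remove(id)
	if err == nil || status.Code(err) == codes.NotFound {
		return nil
	}
	return fmt.Errorf("removing container %s: %w", id, err)
}

func main() {
	gone := func(id string) error {
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	// Prints <nil>: the NotFound from the runtime is swallowed.
	fmt.Println(removeIfPresent(gone, "9834812903b2"))
}
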
pods=["openstack/cinder-api-0"] Sep 30 13:55:27 crc kubenswrapper[4783]: E0930 13:55:27.422801 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33c92e45-099f-4186-8cf0-ee8ef6705c9f" containerName="cinder-api" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.422818 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="33c92e45-099f-4186-8cf0-ee8ef6705c9f" containerName="cinder-api" Sep 30 13:55:27 crc kubenswrapper[4783]: E0930 13:55:27.422853 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33c92e45-099f-4186-8cf0-ee8ef6705c9f" containerName="cinder-api-log" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.422859 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="33c92e45-099f-4186-8cf0-ee8ef6705c9f" containerName="cinder-api-log" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.423056 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="33c92e45-099f-4186-8cf0-ee8ef6705c9f" containerName="cinder-api" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.423081 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="33c92e45-099f-4186-8cf0-ee8ef6705c9f" containerName="cinder-api-log" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.424002 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.426028 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.426209 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.426336 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.436193 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.553895 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.553953 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.554139 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.554231 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffgxp\" (UniqueName: \"kubernetes.io/projected/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-kube-api-access-ffgxp\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " 
pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.554277 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.554368 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-logs\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.554412 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-config-data\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.554527 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.554575 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-scripts\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.633266 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.655828 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.655873 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-scripts\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.655898 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.655927 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.655969 4783 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.655998 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffgxp\" (UniqueName: \"kubernetes.io/projected/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-kube-api-access-ffgxp\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.656022 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.656056 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-logs\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.656075 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-config-data\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.656832 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.657095 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-logs\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.662106 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.663646 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.664088 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.664620 4783 operation_generator.go:637] 
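
Every kubenswrapper entry in this log has the same shape: a journald prefix (date, host, unit[pid]) followed by a klog header (severity letter plus MMDD, wall time, PID, source file:line) and a structured message with key="value" pairs. When post-processing a log like this one, a small extractor helps; the sketch below is tuned only to the lines shown here, not a general klog parser, and it assumes one entry per physical line, so an archived copy where several entries share a line would first need splitting on the timestamp prefix.

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Matches lines shaped like:
//   Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.656832 4783 operation_generator.go:637] "MountVolume.SetUp succeeded ..."
var entry = regexp.MustCompile(
	`^(\w{3} +\d+ [\d:]+) (\S+) kubenswrapper\[(\d+)\]: ([IWE])(\d{4}) ([\d:.]+) +\d+ (\S+:\d+)\] (.*)$`)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // some entries are very long
	for sc.Scan() {
		if m := entry.FindStringSubmatch(sc.Text()); m != nil {
			// m[4] = severity, m[7] = source file:line, m[8] = message body.
			fmt.Printf("%s %s %s\n", m[4], m[7], m[8])
		}
	}
}
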
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-config-data\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.671337 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-scripts\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.672681 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.677037 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffgxp\" (UniqueName: \"kubernetes.io/projected/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-kube-api-access-ffgxp\") pod \"cinder-api-0\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " pod="openstack/cinder-api-0" Sep 30 13:55:27 crc kubenswrapper[4783]: I0930 13:55:27.751971 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 13:55:28 crc kubenswrapper[4783]: I0930 13:55:28.090693 4783 generic.go:334] "Generic (PLEG): container finished" podID="99edfb42-ed13-471e-8e93-62ccafc5b190" containerID="237eb38fbdc94904d60796a2236ae34e060003dfa7daad4c18b8375762d75436" exitCode=0 Sep 30 13:55:28 crc kubenswrapper[4783]: I0930 13:55:28.090766 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wvfc5" event={"ID":"99edfb42-ed13-471e-8e93-62ccafc5b190","Type":"ContainerDied","Data":"237eb38fbdc94904d60796a2236ae34e060003dfa7daad4c18b8375762d75436"} Sep 30 13:55:28 crc kubenswrapper[4783]: I0930 13:55:28.097050 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"591731a6-650f-464f-90dd-8d5822adc81a","Type":"ContainerStarted","Data":"30e199315c402c331969cbd38432f7b3998b4a381b1bd3fc742d7ca2f4f8dd85"} Sep 30 13:55:28 crc kubenswrapper[4783]: I0930 13:55:28.143904 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.926876459 podStartE2EDuration="9.143882715s" podCreationTimestamp="2025-09-30 13:55:19 +0000 UTC" firstStartedPulling="2025-09-30 13:55:20.81661376 +0000 UTC m=+1220.748080087" lastFinishedPulling="2025-09-30 13:55:27.033620036 +0000 UTC m=+1226.965086343" observedRunningTime="2025-09-30 13:55:28.138165852 +0000 UTC m=+1228.069632159" watchObservedRunningTime="2025-09-30 13:55:28.143882715 +0000 UTC m=+1228.075349022" Sep 30 13:55:28 crc kubenswrapper[4783]: I0930 13:55:28.187438 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 13:55:28 crc kubenswrapper[4783]: I0930 13:55:28.187707 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6b7447c7-b05c-4a35-99a8-212d2fccfdbb" containerName="glance-log" containerID="cri-o://a3a24d4d11109348dc5aea74a514101dc4fd4971209e6891ef2e78c506c18182" gracePeriod=30 Sep 30 13:55:28 crc kubenswrapper[4783]: I0930 13:55:28.187871 4783 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6b7447c7-b05c-4a35-99a8-212d2fccfdbb" containerName="glance-httpd" containerID="cri-o://efe36767497a9fc9e028d17967e730c37c0c7d9e9dda36bf40da68bab84aeea1" gracePeriod=30 Sep 30 13:55:28 crc kubenswrapper[4783]: I0930 13:55:28.229372 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 13:55:28 crc kubenswrapper[4783]: W0930 13:55:28.240194 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fece54c_da0b_4cc0_b20d_b442b2fa73ed.slice/crio-59f4fc2f93b7de531fbd31d16570ccaf625421f9295fb39404a719fd0d51160f WatchSource:0}: Error finding container 59f4fc2f93b7de531fbd31d16570ccaf625421f9295fb39404a719fd0d51160f: Status 404 returned error can't find the container with id 59f4fc2f93b7de531fbd31d16570ccaf625421f9295fb39404a719fd0d51160f Sep 30 13:55:28 crc kubenswrapper[4783]: I0930 13:55:28.876400 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33c92e45-099f-4186-8cf0-ee8ef6705c9f" path="/var/lib/kubelet/pods/33c92e45-099f-4186-8cf0-ee8ef6705c9f/volumes" Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.106360 4783 generic.go:334] "Generic (PLEG): container finished" podID="6b7447c7-b05c-4a35-99a8-212d2fccfdbb" containerID="a3a24d4d11109348dc5aea74a514101dc4fd4971209e6891ef2e78c506c18182" exitCode=143 Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.106760 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6b7447c7-b05c-4a35-99a8-212d2fccfdbb","Type":"ContainerDied","Data":"a3a24d4d11109348dc5aea74a514101dc4fd4971209e6891ef2e78c506c18182"} Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.110588 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8fece54c-da0b-4cc0-b20d-b442b2fa73ed","Type":"ContainerStarted","Data":"117dc1758063264abe9e841b11bdaac117d677993965b0ae1f6a1d411ffb2d13"} Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.110632 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.110645 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8fece54c-da0b-4cc0-b20d-b442b2fa73ed","Type":"ContainerStarted","Data":"59f4fc2f93b7de531fbd31d16570ccaf625421f9295fb39404a719fd0d51160f"} Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.437772 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-wvfc5" Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.492591 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/99edfb42-ed13-471e-8e93-62ccafc5b190-config\") pod \"99edfb42-ed13-471e-8e93-62ccafc5b190\" (UID: \"99edfb42-ed13-471e-8e93-62ccafc5b190\") " Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.492811 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99edfb42-ed13-471e-8e93-62ccafc5b190-combined-ca-bundle\") pod \"99edfb42-ed13-471e-8e93-62ccafc5b190\" (UID: \"99edfb42-ed13-471e-8e93-62ccafc5b190\") " Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.492883 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9p5r\" (UniqueName: \"kubernetes.io/projected/99edfb42-ed13-471e-8e93-62ccafc5b190-kube-api-access-g9p5r\") pod \"99edfb42-ed13-471e-8e93-62ccafc5b190\" (UID: \"99edfb42-ed13-471e-8e93-62ccafc5b190\") " Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.502750 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99edfb42-ed13-471e-8e93-62ccafc5b190-kube-api-access-g9p5r" (OuterVolumeSpecName: "kube-api-access-g9p5r") pod "99edfb42-ed13-471e-8e93-62ccafc5b190" (UID: "99edfb42-ed13-471e-8e93-62ccafc5b190"). InnerVolumeSpecName "kube-api-access-g9p5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.528467 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99edfb42-ed13-471e-8e93-62ccafc5b190-config" (OuterVolumeSpecName: "config") pod "99edfb42-ed13-471e-8e93-62ccafc5b190" (UID: "99edfb42-ed13-471e-8e93-62ccafc5b190"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.531436 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99edfb42-ed13-471e-8e93-62ccafc5b190-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "99edfb42-ed13-471e-8e93-62ccafc5b190" (UID: "99edfb42-ed13-471e-8e93-62ccafc5b190"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.594954 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99edfb42-ed13-471e-8e93-62ccafc5b190-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.594987 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9p5r\" (UniqueName: \"kubernetes.io/projected/99edfb42-ed13-471e-8e93-62ccafc5b190-kube-api-access-g9p5r\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.595000 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/99edfb42-ed13-471e-8e93-62ccafc5b190-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:29 crc kubenswrapper[4783]: I0930 13:55:29.892691 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.120939 4783 generic.go:334] "Generic (PLEG): container finished" podID="be403e4a-ed71-41b9-9c17-16a913ecbd8e" containerID="c4c2d396cb8c402c092c9d624b22bee0110dc4673e91dc69423eba5248d63bde" exitCode=0 Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.121254 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be403e4a-ed71-41b9-9c17-16a913ecbd8e","Type":"ContainerDied","Data":"c4c2d396cb8c402c092c9d624b22bee0110dc4673e91dc69423eba5248d63bde"} Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.131909 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-wvfc5" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.131922 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wvfc5" event={"ID":"99edfb42-ed13-471e-8e93-62ccafc5b190","Type":"ContainerDied","Data":"1b7c42f729b37621a5434eb38128d820a9dd894c1eee42c2a53e328e23d29eec"} Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.131974 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b7c42f729b37621a5434eb38128d820a9dd894c1eee42c2a53e328e23d29eec" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.141518 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8fece54c-da0b-4cc0-b20d-b442b2fa73ed","Type":"ContainerStarted","Data":"6deab5bf48649d7f6437dbb5f0e0ebd19ad06c0737cc1e0e97eeb7f38e12f735"} Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.141634 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.175935 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.175913522 podStartE2EDuration="3.175913522s" podCreationTimestamp="2025-09-30 13:55:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:55:30.1608555 +0000 UTC m=+1230.092321807" watchObservedRunningTime="2025-09-30 13:55:30.175913522 +0000 UTC m=+1230.107379859" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.355028 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59449b9989-xxcqn"] Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.355304 4783 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/dnsmasq-dns-59449b9989-xxcqn" podUID="292c2a06-9751-458a-af6b-c2f5687e368e" containerName="dnsmasq-dns" containerID="cri-o://aecad23e6b4dc0aa0acf782c74538e26d0f373894066c923de5834591c08ef04" gracePeriod=10 Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.357902 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.413953 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c47bb5d77-b72ll"] Sep 30 13:55:30 crc kubenswrapper[4783]: E0930 13:55:30.414580 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99edfb42-ed13-471e-8e93-62ccafc5b190" containerName="neutron-db-sync" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.414593 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="99edfb42-ed13-471e-8e93-62ccafc5b190" containerName="neutron-db-sync" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.414788 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="99edfb42-ed13-471e-8e93-62ccafc5b190" containerName="neutron-db-sync" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.416493 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.429440 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c47bb5d77-b72ll"] Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.484865 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5d5b9bf7c6-787zm"] Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.486660 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.502833 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.503062 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.503186 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.503320 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-ls4mg" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.512648 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-ovsdbserver-nb\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.512715 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-config\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.512738 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-dns-swift-storage-0\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.512796 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-dns-svc\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.512852 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-ovsdbserver-sb\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.512876 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6h2n\" (UniqueName: \"kubernetes.io/projected/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-kube-api-access-v6h2n\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.517932 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5d5b9bf7c6-787zm"] Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.621037 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-ovsdbserver-nb\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.621104 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr9hw\" (UniqueName: \"kubernetes.io/projected/43d0d755-e680-4fd1-937f-bf1b03c82289-kube-api-access-rr9hw\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.621278 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-config\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.621321 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-dns-swift-storage-0\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.621485 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-ovndb-tls-certs\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.621554 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-dns-svc\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.621626 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-httpd-config\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.621667 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-combined-ca-bundle\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.621704 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-ovsdbserver-sb\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.621728 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-config\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.621761 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6h2n\" (UniqueName: \"kubernetes.io/projected/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-kube-api-access-v6h2n\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.623095 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-ovsdbserver-nb\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.625040 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-config\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.625626 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-dns-swift-storage-0\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.677460 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-dns-svc\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.678068 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-ovsdbserver-sb\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.686168 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.687760 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6h2n\" (UniqueName: \"kubernetes.io/projected/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-kube-api-access-v6h2n\") pod \"dnsmasq-dns-6c47bb5d77-b72ll\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.725554 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be403e4a-ed71-41b9-9c17-16a913ecbd8e-httpd-run\") pod \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.725606 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-scripts\") pod \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.725684 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x26f5\" (UniqueName: \"kubernetes.io/projected/be403e4a-ed71-41b9-9c17-16a913ecbd8e-kube-api-access-x26f5\") pod \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.725750 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-config-data\") pod \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.725775 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-public-tls-certs\") pod \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.725815 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-combined-ca-bundle\") pod \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.725875 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be403e4a-ed71-41b9-9c17-16a913ecbd8e-logs\") pod \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.725927 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\" (UID: \"be403e4a-ed71-41b9-9c17-16a913ecbd8e\") " Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.726073 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be403e4a-ed71-41b9-9c17-16a913ecbd8e-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "be403e4a-ed71-41b9-9c17-16a913ecbd8e" (UID: 
"be403e4a-ed71-41b9-9c17-16a913ecbd8e"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.726374 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-httpd-config\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.726423 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-combined-ca-bundle\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.726461 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-config\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.726548 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr9hw\" (UniqueName: \"kubernetes.io/projected/43d0d755-e680-4fd1-937f-bf1b03c82289-kube-api-access-rr9hw\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.726643 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-ovndb-tls-certs\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.726727 4783 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be403e4a-ed71-41b9-9c17-16a913ecbd8e-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.730080 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-ovndb-tls-certs\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.731477 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be403e4a-ed71-41b9-9c17-16a913ecbd8e-logs" (OuterVolumeSpecName: "logs") pod "be403e4a-ed71-41b9-9c17-16a913ecbd8e" (UID: "be403e4a-ed71-41b9-9c17-16a913ecbd8e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.732863 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-httpd-config\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.734614 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-config\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.735310 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-combined-ca-bundle\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.742311 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-scripts" (OuterVolumeSpecName: "scripts") pod "be403e4a-ed71-41b9-9c17-16a913ecbd8e" (UID: "be403e4a-ed71-41b9-9c17-16a913ecbd8e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.751439 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be403e4a-ed71-41b9-9c17-16a913ecbd8e-kube-api-access-x26f5" (OuterVolumeSpecName: "kube-api-access-x26f5") pod "be403e4a-ed71-41b9-9c17-16a913ecbd8e" (UID: "be403e4a-ed71-41b9-9c17-16a913ecbd8e"). InnerVolumeSpecName "kube-api-access-x26f5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.760870 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "be403e4a-ed71-41b9-9c17-16a913ecbd8e" (UID: "be403e4a-ed71-41b9-9c17-16a913ecbd8e"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.780103 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr9hw\" (UniqueName: \"kubernetes.io/projected/43d0d755-e680-4fd1-937f-bf1b03c82289-kube-api-access-rr9hw\") pod \"neutron-5d5b9bf7c6-787zm\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.793703 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be403e4a-ed71-41b9-9c17-16a913ecbd8e" (UID: "be403e4a-ed71-41b9-9c17-16a913ecbd8e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.829360 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.829410 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x26f5\" (UniqueName: \"kubernetes.io/projected/be403e4a-ed71-41b9-9c17-16a913ecbd8e-kube-api-access-x26f5\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.829427 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.829440 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be403e4a-ed71-41b9-9c17-16a913ecbd8e-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.829466 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.832389 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "be403e4a-ed71-41b9-9c17-16a913ecbd8e" (UID: "be403e4a-ed71-41b9-9c17-16a913ecbd8e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.836818 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.848241 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.854289 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-config-data" (OuterVolumeSpecName: "config-data") pod "be403e4a-ed71-41b9-9c17-16a913ecbd8e" (UID: "be403e4a-ed71-41b9-9c17-16a913ecbd8e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.863078 4783 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.931673 4783 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.931711 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.931723 4783 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be403e4a-ed71-41b9-9c17-16a913ecbd8e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:30 crc kubenswrapper[4783]: I0930 13:55:30.941728 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.032491 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-dns-swift-storage-0\") pod \"292c2a06-9751-458a-af6b-c2f5687e368e\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.032544 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-ovsdbserver-nb\") pod \"292c2a06-9751-458a-af6b-c2f5687e368e\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.032709 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7db7\" (UniqueName: \"kubernetes.io/projected/292c2a06-9751-458a-af6b-c2f5687e368e-kube-api-access-g7db7\") pod \"292c2a06-9751-458a-af6b-c2f5687e368e\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.032744 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-config\") pod \"292c2a06-9751-458a-af6b-c2f5687e368e\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.032837 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-dns-svc\") pod \"292c2a06-9751-458a-af6b-c2f5687e368e\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.033944 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-ovsdbserver-sb\") pod \"292c2a06-9751-458a-af6b-c2f5687e368e\" (UID: \"292c2a06-9751-458a-af6b-c2f5687e368e\") " Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.037751 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/292c2a06-9751-458a-af6b-c2f5687e368e-kube-api-access-g7db7" (OuterVolumeSpecName: "kube-api-access-g7db7") pod "292c2a06-9751-458a-af6b-c2f5687e368e" (UID: "292c2a06-9751-458a-af6b-c2f5687e368e"). InnerVolumeSpecName "kube-api-access-g7db7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.101949 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "292c2a06-9751-458a-af6b-c2f5687e368e" (UID: "292c2a06-9751-458a-af6b-c2f5687e368e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.125109 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "292c2a06-9751-458a-af6b-c2f5687e368e" (UID: "292c2a06-9751-458a-af6b-c2f5687e368e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.138279 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7db7\" (UniqueName: \"kubernetes.io/projected/292c2a06-9751-458a-af6b-c2f5687e368e-kube-api-access-g7db7\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.138307 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.138317 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.147279 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-config" (OuterVolumeSpecName: "config") pod "292c2a06-9751-458a-af6b-c2f5687e368e" (UID: "292c2a06-9751-458a-af6b-c2f5687e368e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.155766 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be403e4a-ed71-41b9-9c17-16a913ecbd8e","Type":"ContainerDied","Data":"64d2adbb39373353d997c2e672da57610cc574d53e32ace7f919b9db90b97079"} Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.155796 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.155813 4783 scope.go:117] "RemoveContainer" containerID="c4c2d396cb8c402c092c9d624b22bee0110dc4673e91dc69423eba5248d63bde" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.166266 4783 generic.go:334] "Generic (PLEG): container finished" podID="292c2a06-9751-458a-af6b-c2f5687e368e" containerID="aecad23e6b4dc0aa0acf782c74538e26d0f373894066c923de5834591c08ef04" exitCode=0 Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.166418 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59449b9989-xxcqn" event={"ID":"292c2a06-9751-458a-af6b-c2f5687e368e","Type":"ContainerDied","Data":"aecad23e6b4dc0aa0acf782c74538e26d0f373894066c923de5834591c08ef04"} Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.166454 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59449b9989-xxcqn" event={"ID":"292c2a06-9751-458a-af6b-c2f5687e368e","Type":"ContainerDied","Data":"69cab4cdbd813dae901c5ad653f0b8036165f14fc9cd26d23c6ce95b9679c960"} Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.166465 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59449b9989-xxcqn" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.167337 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="ceilometer-central-agent" containerID="cri-o://4a2c40cff1f58fe992964b368cf0962b122ef6dff3a7eba7225b567a6d72f2bc" gracePeriod=30 Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.167407 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="proxy-httpd" containerID="cri-o://30e199315c402c331969cbd38432f7b3998b4a381b1bd3fc742d7ca2f4f8dd85" gracePeriod=30 Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.167414 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="sg-core" containerID="cri-o://0a4fc9533afb30bcbf59dab810c84fd8bfb56443095e7120b1f21feebb690418" gracePeriod=30 Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.167426 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="ceilometer-notification-agent" containerID="cri-o://9a58d0f5c774b27da3f59a0e5da9ee410edbe6a0bb2e0e3fcd75600bbedb4c9c" gracePeriod=30 Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.224598 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "292c2a06-9751-458a-af6b-c2f5687e368e" (UID: "292c2a06-9751-458a-af6b-c2f5687e368e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.227841 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "292c2a06-9751-458a-af6b-c2f5687e368e" (UID: "292c2a06-9751-458a-af6b-c2f5687e368e"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.245734 4783 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.245786 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.245798 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/292c2a06-9751-458a-af6b-c2f5687e368e-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.372274 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.385261 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.402270 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:55:31 crc kubenswrapper[4783]: E0930 13:55:31.403083 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="292c2a06-9751-458a-af6b-c2f5687e368e" containerName="dnsmasq-dns" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.403109 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="292c2a06-9751-458a-af6b-c2f5687e368e" containerName="dnsmasq-dns" Sep 30 13:55:31 crc kubenswrapper[4783]: E0930 13:55:31.403129 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="292c2a06-9751-458a-af6b-c2f5687e368e" containerName="init" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.403137 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="292c2a06-9751-458a-af6b-c2f5687e368e" containerName="init" Sep 30 13:55:31 crc kubenswrapper[4783]: E0930 13:55:31.403147 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be403e4a-ed71-41b9-9c17-16a913ecbd8e" containerName="glance-httpd" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.403155 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="be403e4a-ed71-41b9-9c17-16a913ecbd8e" containerName="glance-httpd" Sep 30 13:55:31 crc kubenswrapper[4783]: E0930 13:55:31.403171 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be403e4a-ed71-41b9-9c17-16a913ecbd8e" containerName="glance-log" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.403178 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="be403e4a-ed71-41b9-9c17-16a913ecbd8e" containerName="glance-log" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.403410 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="be403e4a-ed71-41b9-9c17-16a913ecbd8e" containerName="glance-log" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.403435 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="be403e4a-ed71-41b9-9c17-16a913ecbd8e" containerName="glance-httpd" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.403455 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="292c2a06-9751-458a-af6b-c2f5687e368e" containerName="dnsmasq-dns" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.404655 4783 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.409729 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.410212 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.410360 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.410688 4783 scope.go:117] "RemoveContainer" containerID="fb0c68da9baeada510a8e07bf009f63dec05e3b0fd0160cf0f2cd4356e1bafbf" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.416825 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c47bb5d77-b72ll"] Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.439571 4783 scope.go:117] "RemoveContainer" containerID="aecad23e6b4dc0aa0acf782c74538e26d0f373894066c923de5834591c08ef04" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.452151 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.452215 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.452257 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/050b08a6-64b8-4237-acfc-37711efa8361-logs\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.452278 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-scripts\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.452314 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.452331 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/050b08a6-64b8-4237-acfc-37711efa8361-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 
crc kubenswrapper[4783]: I0930 13:55:31.452351 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-config-data\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.452371 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxrjn\" (UniqueName: \"kubernetes.io/projected/050b08a6-64b8-4237-acfc-37711efa8361-kube-api-access-xxrjn\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.498516 4783 scope.go:117] "RemoveContainer" containerID="af164cdab79986ad1ef58f3128b3351843cfa6f2b2741c1169051422073c65b0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.540290 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59449b9989-xxcqn"] Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.553445 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.553507 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.554063 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/050b08a6-64b8-4237-acfc-37711efa8361-logs\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.554112 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-scripts\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.554170 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.554243 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/050b08a6-64b8-4237-acfc-37711efa8361-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.554287 4783 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-config-data\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.554334 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxrjn\" (UniqueName: \"kubernetes.io/projected/050b08a6-64b8-4237-acfc-37711efa8361-kube-api-access-xxrjn\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.554473 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/050b08a6-64b8-4237-acfc-37711efa8361-logs\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.554698 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/050b08a6-64b8-4237-acfc-37711efa8361-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.555008 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.557390 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59449b9989-xxcqn"] Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.560825 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.561425 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-scripts\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.566491 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-config-data\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.567363 4783 scope.go:117] "RemoveContainer" containerID="aecad23e6b4dc0aa0acf782c74538e26d0f373894066c923de5834591c08ef04" Sep 30 13:55:31 crc kubenswrapper[4783]: E0930 13:55:31.569603 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"aecad23e6b4dc0aa0acf782c74538e26d0f373894066c923de5834591c08ef04\": container with ID starting with aecad23e6b4dc0aa0acf782c74538e26d0f373894066c923de5834591c08ef04 not found: ID does not exist" containerID="aecad23e6b4dc0aa0acf782c74538e26d0f373894066c923de5834591c08ef04" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.569669 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aecad23e6b4dc0aa0acf782c74538e26d0f373894066c923de5834591c08ef04"} err="failed to get container status \"aecad23e6b4dc0aa0acf782c74538e26d0f373894066c923de5834591c08ef04\": rpc error: code = NotFound desc = could not find container \"aecad23e6b4dc0aa0acf782c74538e26d0f373894066c923de5834591c08ef04\": container with ID starting with aecad23e6b4dc0aa0acf782c74538e26d0f373894066c923de5834591c08ef04 not found: ID does not exist" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.569710 4783 scope.go:117] "RemoveContainer" containerID="af164cdab79986ad1ef58f3128b3351843cfa6f2b2741c1169051422073c65b0" Sep 30 13:55:31 crc kubenswrapper[4783]: E0930 13:55:31.572970 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af164cdab79986ad1ef58f3128b3351843cfa6f2b2741c1169051422073c65b0\": container with ID starting with af164cdab79986ad1ef58f3128b3351843cfa6f2b2741c1169051422073c65b0 not found: ID does not exist" containerID="af164cdab79986ad1ef58f3128b3351843cfa6f2b2741c1169051422073c65b0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.573018 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af164cdab79986ad1ef58f3128b3351843cfa6f2b2741c1169051422073c65b0"} err="failed to get container status \"af164cdab79986ad1ef58f3128b3351843cfa6f2b2741c1169051422073c65b0\": rpc error: code = NotFound desc = could not find container \"af164cdab79986ad1ef58f3128b3351843cfa6f2b2741c1169051422073c65b0\": container with ID starting with af164cdab79986ad1ef58f3128b3351843cfa6f2b2741c1169051422073c65b0 not found: ID does not exist" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.577680 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxrjn\" (UniqueName: \"kubernetes.io/projected/050b08a6-64b8-4237-acfc-37711efa8361-kube-api-access-xxrjn\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.579473 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.591108 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " pod="openstack/glance-default-external-api-0" Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.664676 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5d5b9bf7c6-787zm"] Sep 30 13:55:31 crc kubenswrapper[4783]: I0930 13:55:31.725266 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.228072 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5d5b9bf7c6-787zm" event={"ID":"43d0d755-e680-4fd1-937f-bf1b03c82289","Type":"ContainerStarted","Data":"60ebd235fc76c16ed44f02b3ce1ffc6e69b72508f0544269c7cc23bd33db8049"} Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.228375 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5d5b9bf7c6-787zm" event={"ID":"43d0d755-e680-4fd1-937f-bf1b03c82289","Type":"ContainerStarted","Data":"4c63b28991caaee14fa3ecec26bf3ef5a6c5499beb811d601297221fc5f7990d"} Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.230937 4783 generic.go:334] "Generic (PLEG): container finished" podID="9f29adc0-f647-4bb9-98fc-8124c7f30a2d" containerID="1652491019d436f8f721dab014615fd6476ed0d3b235b982ff8880bb0917fc6e" exitCode=0 Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.230973 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" event={"ID":"9f29adc0-f647-4bb9-98fc-8124c7f30a2d","Type":"ContainerDied","Data":"1652491019d436f8f721dab014615fd6476ed0d3b235b982ff8880bb0917fc6e"} Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.230986 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" event={"ID":"9f29adc0-f647-4bb9-98fc-8124c7f30a2d","Type":"ContainerStarted","Data":"b0d9261b33eafba6dba7c4c5437b90ab5df0813b0205b89ea1cb059c94627a84"} Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.246478 4783 generic.go:334] "Generic (PLEG): container finished" podID="591731a6-650f-464f-90dd-8d5822adc81a" containerID="30e199315c402c331969cbd38432f7b3998b4a381b1bd3fc742d7ca2f4f8dd85" exitCode=0 Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.246508 4783 generic.go:334] "Generic (PLEG): container finished" podID="591731a6-650f-464f-90dd-8d5822adc81a" containerID="0a4fc9533afb30bcbf59dab810c84fd8bfb56443095e7120b1f21feebb690418" exitCode=2 Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.246517 4783 generic.go:334] "Generic (PLEG): container finished" podID="591731a6-650f-464f-90dd-8d5822adc81a" containerID="9a58d0f5c774b27da3f59a0e5da9ee410edbe6a0bb2e0e3fcd75600bbedb4c9c" exitCode=0 Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.246524 4783 generic.go:334] "Generic (PLEG): container finished" podID="591731a6-650f-464f-90dd-8d5822adc81a" containerID="4a2c40cff1f58fe992964b368cf0962b122ef6dff3a7eba7225b567a6d72f2bc" exitCode=0 Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.246581 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"591731a6-650f-464f-90dd-8d5822adc81a","Type":"ContainerDied","Data":"30e199315c402c331969cbd38432f7b3998b4a381b1bd3fc742d7ca2f4f8dd85"} Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.246606 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"591731a6-650f-464f-90dd-8d5822adc81a","Type":"ContainerDied","Data":"0a4fc9533afb30bcbf59dab810c84fd8bfb56443095e7120b1f21feebb690418"} Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.246638 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"591731a6-650f-464f-90dd-8d5822adc81a","Type":"ContainerDied","Data":"9a58d0f5c774b27da3f59a0e5da9ee410edbe6a0bb2e0e3fcd75600bbedb4c9c"} Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.246648 
4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"591731a6-650f-464f-90dd-8d5822adc81a","Type":"ContainerDied","Data":"4a2c40cff1f58fe992964b368cf0962b122ef6dff3a7eba7225b567a6d72f2bc"} Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.270850 4783 generic.go:334] "Generic (PLEG): container finished" podID="6b7447c7-b05c-4a35-99a8-212d2fccfdbb" containerID="efe36767497a9fc9e028d17967e730c37c0c7d9e9dda36bf40da68bab84aeea1" exitCode=0 Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.271044 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6b7447c7-b05c-4a35-99a8-212d2fccfdbb","Type":"ContainerDied","Data":"efe36767497a9fc9e028d17967e730c37c0c7d9e9dda36bf40da68bab84aeea1"} Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.271119 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6b7447c7-b05c-4a35-99a8-212d2fccfdbb","Type":"ContainerDied","Data":"3fd1766ad78e24aa62e2b7df630fccd1e7b5440a6d739f5e33121a20d551fc99"} Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.271444 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3fd1766ad78e24aa62e2b7df630fccd1e7b5440a6d739f5e33121a20d551fc99" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.276727 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.325632 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388238 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-scripts\") pod \"591731a6-650f-464f-90dd-8d5822adc81a\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388281 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-combined-ca-bundle\") pod \"591731a6-650f-464f-90dd-8d5822adc81a\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388314 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-sg-core-conf-yaml\") pod \"591731a6-650f-464f-90dd-8d5822adc81a\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388370 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-httpd-run\") pod \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388440 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-logs\") pod \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388468 4783 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-config-data\") pod \"591731a6-650f-464f-90dd-8d5822adc81a\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388501 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwv2w\" (UniqueName: \"kubernetes.io/projected/591731a6-650f-464f-90dd-8d5822adc81a-kube-api-access-xwv2w\") pod \"591731a6-650f-464f-90dd-8d5822adc81a\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388529 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/591731a6-650f-464f-90dd-8d5822adc81a-log-httpd\") pod \"591731a6-650f-464f-90dd-8d5822adc81a\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388557 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388574 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/591731a6-650f-464f-90dd-8d5822adc81a-run-httpd\") pod \"591731a6-650f-464f-90dd-8d5822adc81a\" (UID: \"591731a6-650f-464f-90dd-8d5822adc81a\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388593 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-combined-ca-bundle\") pod \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388612 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-internal-tls-certs\") pod \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388631 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94xj6\" (UniqueName: \"kubernetes.io/projected/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-kube-api-access-94xj6\") pod \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388672 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-config-data\") pod \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.388718 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-scripts\") pod \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\" (UID: \"6b7447c7-b05c-4a35-99a8-212d2fccfdbb\") " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.396518 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/591731a6-650f-464f-90dd-8d5822adc81a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "591731a6-650f-464f-90dd-8d5822adc81a" (UID: "591731a6-650f-464f-90dd-8d5822adc81a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.397150 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/591731a6-650f-464f-90dd-8d5822adc81a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "591731a6-650f-464f-90dd-8d5822adc81a" (UID: "591731a6-650f-464f-90dd-8d5822adc81a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.397393 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-logs" (OuterVolumeSpecName: "logs") pod "6b7447c7-b05c-4a35-99a8-212d2fccfdbb" (UID: "6b7447c7-b05c-4a35-99a8-212d2fccfdbb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.397640 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6b7447c7-b05c-4a35-99a8-212d2fccfdbb" (UID: "6b7447c7-b05c-4a35-99a8-212d2fccfdbb"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.406113 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "6b7447c7-b05c-4a35-99a8-212d2fccfdbb" (UID: "6b7447c7-b05c-4a35-99a8-212d2fccfdbb"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.413426 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-scripts" (OuterVolumeSpecName: "scripts") pod "591731a6-650f-464f-90dd-8d5822adc81a" (UID: "591731a6-650f-464f-90dd-8d5822adc81a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.423829 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-scripts" (OuterVolumeSpecName: "scripts") pod "6b7447c7-b05c-4a35-99a8-212d2fccfdbb" (UID: "6b7447c7-b05c-4a35-99a8-212d2fccfdbb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.423858 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/591731a6-650f-464f-90dd-8d5822adc81a-kube-api-access-xwv2w" (OuterVolumeSpecName: "kube-api-access-xwv2w") pod "591731a6-650f-464f-90dd-8d5822adc81a" (UID: "591731a6-650f-464f-90dd-8d5822adc81a"). InnerVolumeSpecName "kube-api-access-xwv2w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.427369 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-kube-api-access-94xj6" (OuterVolumeSpecName: "kube-api-access-94xj6") pod "6b7447c7-b05c-4a35-99a8-212d2fccfdbb" (UID: "6b7447c7-b05c-4a35-99a8-212d2fccfdbb"). InnerVolumeSpecName "kube-api-access-94xj6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.485236 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.490319 4783 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.490344 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.490354 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwv2w\" (UniqueName: \"kubernetes.io/projected/591731a6-650f-464f-90dd-8d5822adc81a-kube-api-access-xwv2w\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.490363 4783 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/591731a6-650f-464f-90dd-8d5822adc81a-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.490383 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.490392 4783 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/591731a6-650f-464f-90dd-8d5822adc81a-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.490400 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94xj6\" (UniqueName: \"kubernetes.io/projected/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-kube-api-access-94xj6\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.490408 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.490416 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.538608 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b7447c7-b05c-4a35-99a8-212d2fccfdbb" (UID: "6b7447c7-b05c-4a35-99a8-212d2fccfdbb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.550997 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6b7447c7-b05c-4a35-99a8-212d2fccfdbb" (UID: "6b7447c7-b05c-4a35-99a8-212d2fccfdbb"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.592450 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.592478 4783 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.631747 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "591731a6-650f-464f-90dd-8d5822adc81a" (UID: "591731a6-650f-464f-90dd-8d5822adc81a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.665418 4783 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.694176 4783 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.694210 4783 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.808744 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "591731a6-650f-464f-90dd-8d5822adc81a" (UID: "591731a6-650f-464f-90dd-8d5822adc81a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.809525 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-config-data" (OuterVolumeSpecName: "config-data") pod "6b7447c7-b05c-4a35-99a8-212d2fccfdbb" (UID: "6b7447c7-b05c-4a35-99a8-212d2fccfdbb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.828510 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-config-data" (OuterVolumeSpecName: "config-data") pod "591731a6-650f-464f-90dd-8d5822adc81a" (UID: "591731a6-650f-464f-90dd-8d5822adc81a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.864509 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="292c2a06-9751-458a-af6b-c2f5687e368e" path="/var/lib/kubelet/pods/292c2a06-9751-458a-af6b-c2f5687e368e/volumes" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.865695 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be403e4a-ed71-41b9-9c17-16a913ecbd8e" path="/var/lib/kubelet/pods/be403e4a-ed71-41b9-9c17-16a913ecbd8e/volumes" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.903775 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b7447c7-b05c-4a35-99a8-212d2fccfdbb-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.903802 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:32 crc kubenswrapper[4783]: I0930 13:55:32.903813 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/591731a6-650f-464f-90dd-8d5822adc81a-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.139541 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.198559 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.290968 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"591731a6-650f-464f-90dd-8d5822adc81a","Type":"ContainerDied","Data":"3ad822ad6e59e6cfbd45e84fbe667202fcc1ab259fc6d425ce857a05639b8e9f"} Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.290997 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.291014 4783 scope.go:117] "RemoveContainer" containerID="30e199315c402c331969cbd38432f7b3998b4a381b1bd3fc742d7ca2f4f8dd85" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.298038 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5d5b9bf7c6-787zm" event={"ID":"43d0d755-e680-4fd1-937f-bf1b03c82289","Type":"ContainerStarted","Data":"0146ef3582b3f46f0018759ff04f24fa8d7c3428f688adbf7c784affe0d1a231"} Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.298827 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.301020 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"050b08a6-64b8-4237-acfc-37711efa8361","Type":"ContainerStarted","Data":"2096da2284588dfdaeed1715bebfcb28123ee57319d9c0bb9e0959ff0768ccd2"} Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.310491 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="6c8f9900-6e27-42d2-900f-9da5172a3d55" containerName="cinder-scheduler" containerID="cri-o://8c9a4a0ee16ca03e151273d85760fd14d45ade98826514c5f6b0b4f5dde16e62" gracePeriod=30 Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.310830 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" event={"ID":"9f29adc0-f647-4bb9-98fc-8124c7f30a2d","Type":"ContainerStarted","Data":"bf3b17d296f0f00755380f1d48da695f1bea8e7e526ee3f11e4f86655b891c32"} Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.310881 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="6c8f9900-6e27-42d2-900f-9da5172a3d55" containerName="probe" containerID="cri-o://4d2857562917695568168339aec1a94456cfb765677b9a844a01b629971e78b2" gracePeriod=30 Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.311346 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.311371 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.328322 4783 scope.go:117] "RemoveContainer" containerID="0a4fc9533afb30bcbf59dab810c84fd8bfb56443095e7120b1f21feebb690418" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.328439 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.340706 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.345892 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5d5b9bf7c6-787zm" podStartSLOduration=3.3458740049999998 podStartE2EDuration="3.345874005s" podCreationTimestamp="2025-09-30 13:55:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:55:33.325577586 +0000 UTC m=+1233.257043903" watchObservedRunningTime="2025-09-30 13:55:33.345874005 +0000 UTC m=+1233.277340312" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.360346 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:33 crc kubenswrapper[4783]: E0930 13:55:33.360833 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="proxy-httpd" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.360848 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="proxy-httpd" Sep 30 13:55:33 crc kubenswrapper[4783]: E0930 13:55:33.360860 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="ceilometer-notification-agent" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.360867 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="ceilometer-notification-agent" Sep 30 13:55:33 crc kubenswrapper[4783]: E0930 13:55:33.360902 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b7447c7-b05c-4a35-99a8-212d2fccfdbb" containerName="glance-log" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.360910 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b7447c7-b05c-4a35-99a8-212d2fccfdbb" containerName="glance-log" Sep 30 13:55:33 crc kubenswrapper[4783]: E0930 13:55:33.360922 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="ceilometer-central-agent" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.360929 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="ceilometer-central-agent" Sep 30 13:55:33 crc kubenswrapper[4783]: E0930 13:55:33.360939 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="sg-core" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.360946 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="sg-core" Sep 30 13:55:33 crc kubenswrapper[4783]: E0930 13:55:33.360963 4783 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="6b7447c7-b05c-4a35-99a8-212d2fccfdbb" containerName="glance-httpd" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.360972 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b7447c7-b05c-4a35-99a8-212d2fccfdbb" containerName="glance-httpd" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.361191 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="proxy-httpd" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.361206 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="ceilometer-central-agent" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.361214 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b7447c7-b05c-4a35-99a8-212d2fccfdbb" containerName="glance-log" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.361251 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="ceilometer-notification-agent" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.361268 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b7447c7-b05c-4a35-99a8-212d2fccfdbb" containerName="glance-httpd" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.361281 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="591731a6-650f-464f-90dd-8d5822adc81a" containerName="sg-core" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.363500 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.366443 4783 scope.go:117] "RemoveContainer" containerID="9a58d0f5c774b27da3f59a0e5da9ee410edbe6a0bb2e0e3fcd75600bbedb4c9c" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.366769 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.367891 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.372416 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.400186 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.414276 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g56j7\" (UniqueName: \"kubernetes.io/projected/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-kube-api-access-g56j7\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.414333 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-run-httpd\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.414405 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.414459 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-scripts\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.414497 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-log-httpd\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.414515 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-config-data\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.414581 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.435349 4783 scope.go:117] "RemoveContainer" containerID="4a2c40cff1f58fe992964b368cf0962b122ef6dff3a7eba7225b567a6d72f2bc" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.435701 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.445617 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.445995 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" podStartSLOduration=3.445978633 podStartE2EDuration="3.445978633s" podCreationTimestamp="2025-09-30 13:55:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:55:33.388154075 +0000 UTC m=+1233.319620382" watchObservedRunningTime="2025-09-30 13:55:33.445978633 +0000 UTC m=+1233.377444940" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.454788 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.458559 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.461713 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.467659 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.516212 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g56j7\" (UniqueName: \"kubernetes.io/projected/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-kube-api-access-g56j7\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.516465 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-run-httpd\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.516496 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.516525 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-scripts\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.516543 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-log-httpd\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.516559 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-config-data\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.516596 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.517661 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-log-httpd\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.521684 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-run-httpd\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.525464 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-scripts\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.526910 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-config-data\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.527953 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.530647 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.544778 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g56j7\" (UniqueName: \"kubernetes.io/projected/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-kube-api-access-g56j7\") pod \"ceilometer-0\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.618365 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.618417 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.618455 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.618478 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r9ml\" (UniqueName: \"kubernetes.io/projected/2419c631-f6ff-431e-bb3b-2c3285eda678-kube-api-access-7r9ml\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " 
pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.618537 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2419c631-f6ff-431e-bb3b-2c3285eda678-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.618576 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.618596 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2419c631-f6ff-431e-bb3b-2c3285eda678-logs\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.618624 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.694466 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.720635 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.720707 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2419c631-f6ff-431e-bb3b-2c3285eda678-logs\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.720741 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.720784 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.720808 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.721121 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.721152 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r9ml\" (UniqueName: \"kubernetes.io/projected/2419c631-f6ff-431e-bb3b-2c3285eda678-kube-api-access-7r9ml\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.721211 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2419c631-f6ff-431e-bb3b-2c3285eda678-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.721434 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2419c631-f6ff-431e-bb3b-2c3285eda678-logs\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.721848 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.721965 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2419c631-f6ff-431e-bb3b-2c3285eda678-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.727329 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.727853 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.730422 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-internal-tls-certs\") pod \"glance-default-internal-api-0\" 
(UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.731801 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.742362 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r9ml\" (UniqueName: \"kubernetes.io/projected/2419c631-f6ff-431e-bb3b-2c3285eda678-kube-api-access-7r9ml\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.753187 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.817036 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.896287 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7c9bc45547-5grb6"] Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.897608 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.900567 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.903149 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Sep 30 13:55:33 crc kubenswrapper[4783]: I0930 13:55:33.915019 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7c9bc45547-5grb6"] Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.036619 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.061918 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-ovndb-tls-certs\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.061972 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-httpd-config\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.062005 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-public-tls-certs\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " 
pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.062034 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-combined-ca-bundle\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.062105 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-config\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.062131 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-internal-tls-certs\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.062153 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hvf7\" (UniqueName: \"kubernetes.io/projected/aea997d7-7510-42b0-91f8-07592048868f-kube-api-access-7hvf7\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.165297 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-config\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.165558 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-internal-tls-certs\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.165585 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hvf7\" (UniqueName: \"kubernetes.io/projected/aea997d7-7510-42b0-91f8-07592048868f-kube-api-access-7hvf7\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.165630 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-ovndb-tls-certs\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.165653 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-httpd-config\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 
13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.165680 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-public-tls-certs\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.165705 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-combined-ca-bundle\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.173077 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-httpd-config\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.173794 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-internal-tls-certs\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.175268 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-config\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.176324 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-public-tls-certs\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.177019 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-ovndb-tls-certs\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.182953 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-combined-ca-bundle\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.198597 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hvf7\" (UniqueName: \"kubernetes.io/projected/aea997d7-7510-42b0-91f8-07592048868f-kube-api-access-7hvf7\") pod \"neutron-7c9bc45547-5grb6\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.224698 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.271324 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.356507 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"050b08a6-64b8-4237-acfc-37711efa8361","Type":"ContainerStarted","Data":"9c00cf71bb8b3efbefb119c4700536e994f2ca1128db0c5280a9b57683983551"} Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.358280 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f","Type":"ContainerStarted","Data":"46acadfd94b84d50c173d4b229b89edae4e39eff498fd6a6e10715f46e38eb46"} Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.533558 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 13:55:34 crc kubenswrapper[4783]: W0930 13:55:34.549875 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2419c631_f6ff_431e_bb3b_2c3285eda678.slice/crio-9fd4a3f25a7ff5632270116d7ad9c058d001cd6c2d1a76df2b6e2b978af3aee1 WatchSource:0}: Error finding container 9fd4a3f25a7ff5632270116d7ad9c058d001cd6c2d1a76df2b6e2b978af3aee1: Status 404 returned error can't find the container with id 9fd4a3f25a7ff5632270116d7ad9c058d001cd6c2d1a76df2b6e2b978af3aee1 Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.862572 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="591731a6-650f-464f-90dd-8d5822adc81a" path="/var/lib/kubelet/pods/591731a6-650f-464f-90dd-8d5822adc81a/volumes" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.863495 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b7447c7-b05c-4a35-99a8-212d2fccfdbb" path="/var/lib/kubelet/pods/6b7447c7-b05c-4a35-99a8-212d2fccfdbb/volumes" Sep 30 13:55:34 crc kubenswrapper[4783]: I0930 13:55:34.914773 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7c9bc45547-5grb6"] Sep 30 13:55:34 crc kubenswrapper[4783]: W0930 13:55:34.920345 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaea997d7_7510_42b0_91f8_07592048868f.slice/crio-f90d7ff480b3466d639d9fd3d1a265fff6e38d607057f215ac9fe6ec3245a129 WatchSource:0}: Error finding container f90d7ff480b3466d639d9fd3d1a265fff6e38d607057f215ac9fe6ec3245a129: Status 404 returned error can't find the container with id f90d7ff480b3466d639d9fd3d1a265fff6e38d607057f215ac9fe6ec3245a129 Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.268140 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-r5r75"] Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.269529 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-r5r75" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.283429 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-r5r75"] Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.383878 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c9bc45547-5grb6" event={"ID":"aea997d7-7510-42b0-91f8-07592048868f","Type":"ContainerStarted","Data":"f90d7ff480b3466d639d9fd3d1a265fff6e38d607057f215ac9fe6ec3245a129"} Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.387508 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-qqwkt"] Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.389500 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-qqwkt" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.389872 4783 generic.go:334] "Generic (PLEG): container finished" podID="6c8f9900-6e27-42d2-900f-9da5172a3d55" containerID="4d2857562917695568168339aec1a94456cfb765677b9a844a01b629971e78b2" exitCode=0 Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.389955 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c8f9900-6e27-42d2-900f-9da5172a3d55","Type":"ContainerDied","Data":"4d2857562917695568168339aec1a94456cfb765677b9a844a01b629971e78b2"} Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.391094 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thg62\" (UniqueName: \"kubernetes.io/projected/5d9f067a-87ea-4ecf-8142-1e28d8d98574-kube-api-access-thg62\") pod \"nova-api-db-create-r5r75\" (UID: \"5d9f067a-87ea-4ecf-8142-1e28d8d98574\") " pod="openstack/nova-api-db-create-r5r75" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.406397 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2419c631-f6ff-431e-bb3b-2c3285eda678","Type":"ContainerStarted","Data":"9fd4a3f25a7ff5632270116d7ad9c058d001cd6c2d1a76df2b6e2b978af3aee1"} Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.408116 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-qqwkt"] Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.411406 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"050b08a6-64b8-4237-acfc-37711efa8361","Type":"ContainerStarted","Data":"1bccf10c6c93de0d51e10e64262519a909d10f198ba045f898de5f0df6447a1d"} Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.444270 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.444251885 podStartE2EDuration="4.444251885s" podCreationTimestamp="2025-09-30 13:55:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:55:35.433801561 +0000 UTC m=+1235.365267878" watchObservedRunningTime="2025-09-30 13:55:35.444251885 +0000 UTC m=+1235.375718192" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.482446 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-rtbsj"] Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.483661 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-rtbsj" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.495146 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thg62\" (UniqueName: \"kubernetes.io/projected/5d9f067a-87ea-4ecf-8142-1e28d8d98574-kube-api-access-thg62\") pod \"nova-api-db-create-r5r75\" (UID: \"5d9f067a-87ea-4ecf-8142-1e28d8d98574\") " pod="openstack/nova-api-db-create-r5r75" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.495305 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnkp7\" (UniqueName: \"kubernetes.io/projected/3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea-kube-api-access-xnkp7\") pod \"nova-cell0-db-create-qqwkt\" (UID: \"3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea\") " pod="openstack/nova-cell0-db-create-qqwkt" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.506266 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-rtbsj"] Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.536900 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thg62\" (UniqueName: \"kubernetes.io/projected/5d9f067a-87ea-4ecf-8142-1e28d8d98574-kube-api-access-thg62\") pod \"nova-api-db-create-r5r75\" (UID: \"5d9f067a-87ea-4ecf-8142-1e28d8d98574\") " pod="openstack/nova-api-db-create-r5r75" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.598451 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gvcf\" (UniqueName: \"kubernetes.io/projected/ec598217-7641-433b-938d-e2740a05a9e1-kube-api-access-7gvcf\") pod \"nova-cell1-db-create-rtbsj\" (UID: \"ec598217-7641-433b-938d-e2740a05a9e1\") " pod="openstack/nova-cell1-db-create-rtbsj" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.598537 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnkp7\" (UniqueName: \"kubernetes.io/projected/3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea-kube-api-access-xnkp7\") pod \"nova-cell0-db-create-qqwkt\" (UID: \"3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea\") " pod="openstack/nova-cell0-db-create-qqwkt" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.605557 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-r5r75" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.621941 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnkp7\" (UniqueName: \"kubernetes.io/projected/3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea-kube-api-access-xnkp7\") pod \"nova-cell0-db-create-qqwkt\" (UID: \"3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea\") " pod="openstack/nova-cell0-db-create-qqwkt" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.701238 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gvcf\" (UniqueName: \"kubernetes.io/projected/ec598217-7641-433b-938d-e2740a05a9e1-kube-api-access-7gvcf\") pod \"nova-cell1-db-create-rtbsj\" (UID: \"ec598217-7641-433b-938d-e2740a05a9e1\") " pod="openstack/nova-cell1-db-create-rtbsj" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.717282 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gvcf\" (UniqueName: \"kubernetes.io/projected/ec598217-7641-433b-938d-e2740a05a9e1-kube-api-access-7gvcf\") pod \"nova-cell1-db-create-rtbsj\" (UID: \"ec598217-7641-433b-938d-e2740a05a9e1\") " pod="openstack/nova-cell1-db-create-rtbsj" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.910643 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-qqwkt" Sep 30 13:55:35 crc kubenswrapper[4783]: I0930 13:55:35.949273 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-rtbsj" Sep 30 13:55:36 crc kubenswrapper[4783]: W0930 13:55:36.206140 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d9f067a_87ea_4ecf_8142_1e28d8d98574.slice/crio-30439de679625f2148ade271eea61a0aef0cb5994126046eee015004d2ec4e3d WatchSource:0}: Error finding container 30439de679625f2148ade271eea61a0aef0cb5994126046eee015004d2ec4e3d: Status 404 returned error can't find the container with id 30439de679625f2148ade271eea61a0aef0cb5994126046eee015004d2ec4e3d Sep 30 13:55:36 crc kubenswrapper[4783]: I0930 13:55:36.222600 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-r5r75"] Sep 30 13:55:36 crc kubenswrapper[4783]: I0930 13:55:36.430704 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-qqwkt"] Sep 30 13:55:36 crc kubenswrapper[4783]: I0930 13:55:36.477502 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-r5r75" event={"ID":"5d9f067a-87ea-4ecf-8142-1e28d8d98574","Type":"ContainerStarted","Data":"30439de679625f2148ade271eea61a0aef0cb5994126046eee015004d2ec4e3d"} Sep 30 13:55:36 crc kubenswrapper[4783]: I0930 13:55:36.482155 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2419c631-f6ff-431e-bb3b-2c3285eda678","Type":"ContainerStarted","Data":"40a405fda44ba184b836f4f22105f610a1f2f4078bd7ae78c09b94c9367d95c2"} Sep 30 13:55:36 crc kubenswrapper[4783]: I0930 13:55:36.483600 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f","Type":"ContainerStarted","Data":"8388bd729deeb5a4bfa281f55b9492a4d2cbff5976c2d784aedefb4af166950c"} Sep 30 13:55:36 crc kubenswrapper[4783]: I0930 13:55:36.485313 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/neutron-7c9bc45547-5grb6" event={"ID":"aea997d7-7510-42b0-91f8-07592048868f","Type":"ContainerStarted","Data":"a27d904ac13e2388585d777e5ccadfdbee717dc617233684f8f3e68da41b3116"} Sep 30 13:55:36 crc kubenswrapper[4783]: I0930 13:55:36.485350 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c9bc45547-5grb6" event={"ID":"aea997d7-7510-42b0-91f8-07592048868f","Type":"ContainerStarted","Data":"082fb383f645276bbaa075b85be6d49c88105cfa13629a7bcfb3725d2695cb56"} Sep 30 13:55:36 crc kubenswrapper[4783]: I0930 13:55:36.485396 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:55:36 crc kubenswrapper[4783]: I0930 13:55:36.517855 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7c9bc45547-5grb6" podStartSLOduration=3.5178340649999997 podStartE2EDuration="3.517834065s" podCreationTimestamp="2025-09-30 13:55:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:55:36.516088588 +0000 UTC m=+1236.447554905" watchObservedRunningTime="2025-09-30 13:55:36.517834065 +0000 UTC m=+1236.449300392" Sep 30 13:55:36 crc kubenswrapper[4783]: I0930 13:55:36.655958 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-rtbsj"] Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.503659 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-qqwkt" event={"ID":"3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea","Type":"ContainerStarted","Data":"3ae59eb86d91fb5c806e663e95ec547ef87d696b32c8a261a6ab43509cdb555f"} Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.504012 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-qqwkt" event={"ID":"3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea","Type":"ContainerStarted","Data":"b69f364f78b618bf7ed61f95b584e5349c289b3df11cc61306a24a8b8a368a76"} Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.507152 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-r5r75" event={"ID":"5d9f067a-87ea-4ecf-8142-1e28d8d98574","Type":"ContainerStarted","Data":"5d04af509a9bb7bfbf8994b131a09676b7cd7a088b0bc9fad1edda3ef3454d1e"} Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.519828 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-rtbsj" event={"ID":"ec598217-7641-433b-938d-e2740a05a9e1","Type":"ContainerStarted","Data":"70af416d1d605f74050168a2f3389c55e91e4acb27456ff5ec693839bb6060e7"} Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.519868 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-rtbsj" event={"ID":"ec598217-7641-433b-938d-e2740a05a9e1","Type":"ContainerStarted","Data":"6841b04ff0a4445459d9f9debd0cef075b73de0ba0ba7186c47449cb3b59c16a"} Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.524005 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-qqwkt" podStartSLOduration=2.523990419 podStartE2EDuration="2.523990419s" podCreationTimestamp="2025-09-30 13:55:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:55:37.520206679 +0000 UTC m=+1237.451672996" watchObservedRunningTime="2025-09-30 13:55:37.523990419 +0000 UTC m=+1237.455456726" Sep 30 
13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.537123 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2419c631-f6ff-431e-bb3b-2c3285eda678","Type":"ContainerStarted","Data":"069ecdff26e68e0d96f961dea1f277e66bd9d7eb17de82605d0a89f72c085c42"} Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.543857 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f","Type":"ContainerStarted","Data":"9968fe2857b4dfc652fda1adff3a77973479c234e35c368054e4b8be62b1c5d3"} Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.546888 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-r5r75" podStartSLOduration=2.546878001 podStartE2EDuration="2.546878001s" podCreationTimestamp="2025-09-30 13:55:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:55:37.546451447 +0000 UTC m=+1237.477917754" watchObservedRunningTime="2025-09-30 13:55:37.546878001 +0000 UTC m=+1237.478344308" Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.549884 4783 generic.go:334] "Generic (PLEG): container finished" podID="6c8f9900-6e27-42d2-900f-9da5172a3d55" containerID="8c9a4a0ee16ca03e151273d85760fd14d45ade98826514c5f6b0b4f5dde16e62" exitCode=0 Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.549924 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c8f9900-6e27-42d2-900f-9da5172a3d55","Type":"ContainerDied","Data":"8c9a4a0ee16ca03e151273d85760fd14d45ade98826514c5f6b0b4f5dde16e62"} Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.568185 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-rtbsj" podStartSLOduration=2.5681675 podStartE2EDuration="2.5681675s" podCreationTimestamp="2025-09-30 13:55:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:55:37.558543923 +0000 UTC m=+1237.490010230" watchObservedRunningTime="2025-09-30 13:55:37.5681675 +0000 UTC m=+1237.499633807" Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.878636 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.908173 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.908154553 podStartE2EDuration="4.908154553s" podCreationTimestamp="2025-09-30 13:55:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:55:37.594523133 +0000 UTC m=+1237.525989440" watchObservedRunningTime="2025-09-30 13:55:37.908154553 +0000 UTC m=+1237.839620860" Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.957371 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c8f9900-6e27-42d2-900f-9da5172a3d55-etc-machine-id\") pod \"6c8f9900-6e27-42d2-900f-9da5172a3d55\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.957420 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-config-data\") pod \"6c8f9900-6e27-42d2-900f-9da5172a3d55\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.957480 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v76gv\" (UniqueName: \"kubernetes.io/projected/6c8f9900-6e27-42d2-900f-9da5172a3d55-kube-api-access-v76gv\") pod \"6c8f9900-6e27-42d2-900f-9da5172a3d55\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.957501 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6c8f9900-6e27-42d2-900f-9da5172a3d55-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "6c8f9900-6e27-42d2-900f-9da5172a3d55" (UID: "6c8f9900-6e27-42d2-900f-9da5172a3d55"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.957557 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-config-data-custom\") pod \"6c8f9900-6e27-42d2-900f-9da5172a3d55\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.957617 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-scripts\") pod \"6c8f9900-6e27-42d2-900f-9da5172a3d55\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.957647 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-combined-ca-bundle\") pod \"6c8f9900-6e27-42d2-900f-9da5172a3d55\" (UID: \"6c8f9900-6e27-42d2-900f-9da5172a3d55\") " Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.958093 4783 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c8f9900-6e27-42d2-900f-9da5172a3d55-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.964126 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c8f9900-6e27-42d2-900f-9da5172a3d55-kube-api-access-v76gv" (OuterVolumeSpecName: "kube-api-access-v76gv") pod "6c8f9900-6e27-42d2-900f-9da5172a3d55" (UID: "6c8f9900-6e27-42d2-900f-9da5172a3d55"). InnerVolumeSpecName "kube-api-access-v76gv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.968440 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-scripts" (OuterVolumeSpecName: "scripts") pod "6c8f9900-6e27-42d2-900f-9da5172a3d55" (UID: "6c8f9900-6e27-42d2-900f-9da5172a3d55"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:37 crc kubenswrapper[4783]: I0930 13:55:37.971373 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6c8f9900-6e27-42d2-900f-9da5172a3d55" (UID: "6c8f9900-6e27-42d2-900f-9da5172a3d55"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.061363 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c8f9900-6e27-42d2-900f-9da5172a3d55" (UID: "6c8f9900-6e27-42d2-900f-9da5172a3d55"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.061556 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v76gv\" (UniqueName: \"kubernetes.io/projected/6c8f9900-6e27-42d2-900f-9da5172a3d55-kube-api-access-v76gv\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.061685 4783 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.061744 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.163343 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.167367 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-config-data" (OuterVolumeSpecName: "config-data") pod "6c8f9900-6e27-42d2-900f-9da5172a3d55" (UID: "6c8f9900-6e27-42d2-900f-9da5172a3d55"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.265513 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c8f9900-6e27-42d2-900f-9da5172a3d55-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.559459 4783 generic.go:334] "Generic (PLEG): container finished" podID="3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea" containerID="3ae59eb86d91fb5c806e663e95ec547ef87d696b32c8a261a6ab43509cdb555f" exitCode=0 Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.559544 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-qqwkt" event={"ID":"3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea","Type":"ContainerDied","Data":"3ae59eb86d91fb5c806e663e95ec547ef87d696b32c8a261a6ab43509cdb555f"} Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.560968 4783 generic.go:334] "Generic (PLEG): container finished" podID="5d9f067a-87ea-4ecf-8142-1e28d8d98574" containerID="5d04af509a9bb7bfbf8994b131a09676b7cd7a088b0bc9fad1edda3ef3454d1e" exitCode=0 Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.561029 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-r5r75" event={"ID":"5d9f067a-87ea-4ecf-8142-1e28d8d98574","Type":"ContainerDied","Data":"5d04af509a9bb7bfbf8994b131a09676b7cd7a088b0bc9fad1edda3ef3454d1e"} Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.562234 4783 generic.go:334] "Generic (PLEG): container finished" podID="ec598217-7641-433b-938d-e2740a05a9e1" containerID="70af416d1d605f74050168a2f3389c55e91e4acb27456ff5ec693839bb6060e7" exitCode=0 Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.562284 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-rtbsj" event={"ID":"ec598217-7641-433b-938d-e2740a05a9e1","Type":"ContainerDied","Data":"70af416d1d605f74050168a2f3389c55e91e4acb27456ff5ec693839bb6060e7"} Sep 30 13:55:38 crc kubenswrapper[4783]: 
I0930 13:55:38.564555 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f","Type":"ContainerStarted","Data":"5e5c4a417fd8d76b8331fd7b6ba5586dcadd42387e2b209d79826e330824ed0a"} Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.566356 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c8f9900-6e27-42d2-900f-9da5172a3d55","Type":"ContainerDied","Data":"916893ab254baac2ced51cfc7eb360cfdd80f8d3fae1ed7ec1ff9565d8ff18de"} Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.566379 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.566407 4783 scope.go:117] "RemoveContainer" containerID="4d2857562917695568168339aec1a94456cfb765677b9a844a01b629971e78b2" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.591122 4783 scope.go:117] "RemoveContainer" containerID="8c9a4a0ee16ca03e151273d85760fd14d45ade98826514c5f6b0b4f5dde16e62" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.640947 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.649997 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.663518 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 13:55:38 crc kubenswrapper[4783]: E0930 13:55:38.663989 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c8f9900-6e27-42d2-900f-9da5172a3d55" containerName="cinder-scheduler" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.664009 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c8f9900-6e27-42d2-900f-9da5172a3d55" containerName="cinder-scheduler" Sep 30 13:55:38 crc kubenswrapper[4783]: E0930 13:55:38.664031 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c8f9900-6e27-42d2-900f-9da5172a3d55" containerName="probe" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.664039 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c8f9900-6e27-42d2-900f-9da5172a3d55" containerName="probe" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.664248 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c8f9900-6e27-42d2-900f-9da5172a3d55" containerName="cinder-scheduler" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.664265 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c8f9900-6e27-42d2-900f-9da5172a3d55" containerName="probe" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.665467 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.668652 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.687597 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.772832 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6kdw\" (UniqueName: \"kubernetes.io/projected/b1dc1d2a-552d-4400-9d1b-12a3a051c432-kube-api-access-k6kdw\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.772904 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.773104 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.773289 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b1dc1d2a-552d-4400-9d1b-12a3a051c432-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.773337 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-scripts\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.773457 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-config-data\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.854843 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c8f9900-6e27-42d2-900f-9da5172a3d55" path="/var/lib/kubelet/pods/6c8f9900-6e27-42d2-900f-9da5172a3d55/volumes" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.875040 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.875136 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/b1dc1d2a-552d-4400-9d1b-12a3a051c432-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.875160 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-scripts\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.875199 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-config-data\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.875259 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6kdw\" (UniqueName: \"kubernetes.io/projected/b1dc1d2a-552d-4400-9d1b-12a3a051c432-kube-api-access-k6kdw\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.875296 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.875976 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b1dc1d2a-552d-4400-9d1b-12a3a051c432-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.880300 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.881611 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.881771 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-scripts\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.894766 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-config-data\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.895184 4783 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-k6kdw\" (UniqueName: \"kubernetes.io/projected/b1dc1d2a-552d-4400-9d1b-12a3a051c432-kube-api-access-k6kdw\") pod \"cinder-scheduler-0\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") " pod="openstack/cinder-scheduler-0" Sep 30 13:55:38 crc kubenswrapper[4783]: I0930 13:55:38.981935 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 13:55:39 crc kubenswrapper[4783]: I0930 13:55:39.520328 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 13:55:39 crc kubenswrapper[4783]: I0930 13:55:39.586266 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b1dc1d2a-552d-4400-9d1b-12a3a051c432","Type":"ContainerStarted","Data":"0f1c9ebe4e5e0df5b0b08c378fe3daffed45fbc50314a6f51ae989e9816b5156"} Sep 30 13:55:39 crc kubenswrapper[4783]: I0930 13:55:39.997910 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-qqwkt" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.101898 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnkp7\" (UniqueName: \"kubernetes.io/projected/3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea-kube-api-access-xnkp7\") pod \"3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea\" (UID: \"3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea\") " Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.122404 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea-kube-api-access-xnkp7" (OuterVolumeSpecName: "kube-api-access-xnkp7") pod "3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea" (UID: "3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea"). InnerVolumeSpecName "kube-api-access-xnkp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.204110 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnkp7\" (UniqueName: \"kubernetes.io/projected/3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea-kube-api-access-xnkp7\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.226699 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.287968 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-r5r75" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.315129 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thg62\" (UniqueName: \"kubernetes.io/projected/5d9f067a-87ea-4ecf-8142-1e28d8d98574-kube-api-access-thg62\") pod \"5d9f067a-87ea-4ecf-8142-1e28d8d98574\" (UID: \"5d9f067a-87ea-4ecf-8142-1e28d8d98574\") " Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.319939 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-rtbsj" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.342510 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d9f067a-87ea-4ecf-8142-1e28d8d98574-kube-api-access-thg62" (OuterVolumeSpecName: "kube-api-access-thg62") pod "5d9f067a-87ea-4ecf-8142-1e28d8d98574" (UID: "5d9f067a-87ea-4ecf-8142-1e28d8d98574"). InnerVolumeSpecName "kube-api-access-thg62". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.417203 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gvcf\" (UniqueName: \"kubernetes.io/projected/ec598217-7641-433b-938d-e2740a05a9e1-kube-api-access-7gvcf\") pod \"ec598217-7641-433b-938d-e2740a05a9e1\" (UID: \"ec598217-7641-433b-938d-e2740a05a9e1\") " Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.421837 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thg62\" (UniqueName: \"kubernetes.io/projected/5d9f067a-87ea-4ecf-8142-1e28d8d98574-kube-api-access-thg62\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.423116 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec598217-7641-433b-938d-e2740a05a9e1-kube-api-access-7gvcf" (OuterVolumeSpecName: "kube-api-access-7gvcf") pod "ec598217-7641-433b-938d-e2740a05a9e1" (UID: "ec598217-7641-433b-938d-e2740a05a9e1"). InnerVolumeSpecName "kube-api-access-7gvcf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.523697 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gvcf\" (UniqueName: \"kubernetes.io/projected/ec598217-7641-433b-938d-e2740a05a9e1-kube-api-access-7gvcf\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.602795 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-qqwkt" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.603509 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-qqwkt" event={"ID":"3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea","Type":"ContainerDied","Data":"b69f364f78b618bf7ed61f95b584e5349c289b3df11cc61306a24a8b8a368a76"} Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.603540 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b69f364f78b618bf7ed61f95b584e5349c289b3df11cc61306a24a8b8a368a76" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.605236 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-r5r75" event={"ID":"5d9f067a-87ea-4ecf-8142-1e28d8d98574","Type":"ContainerDied","Data":"30439de679625f2148ade271eea61a0aef0cb5994126046eee015004d2ec4e3d"} Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.605255 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30439de679625f2148ade271eea61a0aef0cb5994126046eee015004d2ec4e3d" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.605304 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-r5r75" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.607837 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-rtbsj" event={"ID":"ec598217-7641-433b-938d-e2740a05a9e1","Type":"ContainerDied","Data":"6841b04ff0a4445459d9f9debd0cef075b73de0ba0ba7186c47449cb3b59c16a"} Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.607879 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6841b04ff0a4445459d9f9debd0cef075b73de0ba0ba7186c47449cb3b59c16a" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.607942 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-rtbsj" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.615208 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b1dc1d2a-552d-4400-9d1b-12a3a051c432","Type":"ContainerStarted","Data":"024211a566129e37ebcc5af066c31416fc39da5253df84444b600a23f5f7a81e"} Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.626647 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f","Type":"ContainerStarted","Data":"33e25b770bd5a15b40960fc87a1c65415f2fa2f12e66216777be041239f94060"} Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.626913 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="ceilometer-central-agent" containerID="cri-o://8388bd729deeb5a4bfa281f55b9492a4d2cbff5976c2d784aedefb4af166950c" gracePeriod=30 Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.627186 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.627544 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="proxy-httpd" containerID="cri-o://33e25b770bd5a15b40960fc87a1c65415f2fa2f12e66216777be041239f94060" gracePeriod=30 Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.627701 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="sg-core" containerID="cri-o://5e5c4a417fd8d76b8331fd7b6ba5586dcadd42387e2b209d79826e330824ed0a" gracePeriod=30 Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.627794 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="ceilometer-notification-agent" containerID="cri-o://9968fe2857b4dfc652fda1adff3a77973479c234e35c368054e4b8be62b1c5d3" gracePeriod=30 Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.655800 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.244762831 podStartE2EDuration="7.655782556s" podCreationTimestamp="2025-09-30 13:55:33 +0000 UTC" firstStartedPulling="2025-09-30 13:55:34.30038941 +0000 UTC m=+1234.231855717" lastFinishedPulling="2025-09-30 13:55:39.711409135 +0000 UTC m=+1239.642875442" observedRunningTime="2025-09-30 13:55:40.64964276 +0000 UTC m=+1240.581109067" watchObservedRunningTime="2025-09-30 13:55:40.655782556 +0000 UTC m=+1240.587248873" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.838418 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.903063 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c856dc5f9-xnnct"] Sep 30 13:55:40 crc kubenswrapper[4783]: I0930 13:55:40.904342 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" podUID="a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" containerName="dnsmasq-dns" containerID="cri-o://a33ca8dc7a63d510ef6c423ebc55a0e0ffa23772903b86590963b73a84a9d33f" gracePeriod=10 Sep 30 
13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.564462 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.651301 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-dns-svc\") pod \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.651378 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgm92\" (UniqueName: \"kubernetes.io/projected/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-kube-api-access-tgm92\") pod \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.651411 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-ovsdbserver-nb\") pod \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.651441 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-dns-swift-storage-0\") pod \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.651464 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-config\") pod \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.651536 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-ovsdbserver-sb\") pod \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\" (UID: \"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60\") " Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.674975 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-kube-api-access-tgm92" (OuterVolumeSpecName: "kube-api-access-tgm92") pod "a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" (UID: "a0183b5a-e3e9-4689-ab48-ea5b73e0bd60"). InnerVolumeSpecName "kube-api-access-tgm92". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.732183 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.732241 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.795052 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgm92\" (UniqueName: \"kubernetes.io/projected/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-kube-api-access-tgm92\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.799633 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-config" (OuterVolumeSpecName: "config") pod "a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" (UID: "a0183b5a-e3e9-4689-ab48-ea5b73e0bd60"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.815872 4783 generic.go:334] "Generic (PLEG): container finished" podID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerID="33e25b770bd5a15b40960fc87a1c65415f2fa2f12e66216777be041239f94060" exitCode=0 Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.815904 4783 generic.go:334] "Generic (PLEG): container finished" podID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerID="5e5c4a417fd8d76b8331fd7b6ba5586dcadd42387e2b209d79826e330824ed0a" exitCode=2 Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.815911 4783 generic.go:334] "Generic (PLEG): container finished" podID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerID="9968fe2857b4dfc652fda1adff3a77973479c234e35c368054e4b8be62b1c5d3" exitCode=0 Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.815943 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f","Type":"ContainerDied","Data":"33e25b770bd5a15b40960fc87a1c65415f2fa2f12e66216777be041239f94060"} Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.815968 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f","Type":"ContainerDied","Data":"5e5c4a417fd8d76b8331fd7b6ba5586dcadd42387e2b209d79826e330824ed0a"} Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.815979 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f","Type":"ContainerDied","Data":"9968fe2857b4dfc652fda1adff3a77973479c234e35c368054e4b8be62b1c5d3"} Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.817977 4783 generic.go:334] "Generic (PLEG): container finished" podID="a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" containerID="a33ca8dc7a63d510ef6c423ebc55a0e0ffa23772903b86590963b73a84a9d33f" exitCode=0 Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.818003 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" event={"ID":"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60","Type":"ContainerDied","Data":"a33ca8dc7a63d510ef6c423ebc55a0e0ffa23772903b86590963b73a84a9d33f"} Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.818020 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" 
event={"ID":"a0183b5a-e3e9-4689-ab48-ea5b73e0bd60","Type":"ContainerDied","Data":"801f9ce7e6a71de4e8305af57adad8f4501f2a1bbbe2a38fc0dbb1d08f25fa26"} Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.818037 4783 scope.go:117] "RemoveContainer" containerID="a33ca8dc7a63d510ef6c423ebc55a0e0ffa23772903b86590963b73a84a9d33f" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.818182 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.848687 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" (UID: "a0183b5a-e3e9-4689-ab48-ea5b73e0bd60"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.852341 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" (UID: "a0183b5a-e3e9-4689-ab48-ea5b73e0bd60"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.866933 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.872846 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.877756 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" (UID: "a0183b5a-e3e9-4689-ab48-ea5b73e0bd60"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.878925 4783 scope.go:117] "RemoveContainer" containerID="c1ed12a1abcc52f48cf0000931ebf5d559373be49d418f4dc5cb4bb23ac3e350" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.893238 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" (UID: "a0183b5a-e3e9-4689-ab48-ea5b73e0bd60"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.903255 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.903303 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.903316 4783 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.903329 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.903340 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.930764 4783 scope.go:117] "RemoveContainer" containerID="a33ca8dc7a63d510ef6c423ebc55a0e0ffa23772903b86590963b73a84a9d33f" Sep 30 13:55:41 crc kubenswrapper[4783]: E0930 13:55:41.931634 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a33ca8dc7a63d510ef6c423ebc55a0e0ffa23772903b86590963b73a84a9d33f\": container with ID starting with a33ca8dc7a63d510ef6c423ebc55a0e0ffa23772903b86590963b73a84a9d33f not found: ID does not exist" containerID="a33ca8dc7a63d510ef6c423ebc55a0e0ffa23772903b86590963b73a84a9d33f" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.931676 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a33ca8dc7a63d510ef6c423ebc55a0e0ffa23772903b86590963b73a84a9d33f"} err="failed to get container status \"a33ca8dc7a63d510ef6c423ebc55a0e0ffa23772903b86590963b73a84a9d33f\": rpc error: code = NotFound desc = could not find container \"a33ca8dc7a63d510ef6c423ebc55a0e0ffa23772903b86590963b73a84a9d33f\": container with ID starting with a33ca8dc7a63d510ef6c423ebc55a0e0ffa23772903b86590963b73a84a9d33f not found: ID does not exist" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.931709 4783 scope.go:117] "RemoveContainer" containerID="c1ed12a1abcc52f48cf0000931ebf5d559373be49d418f4dc5cb4bb23ac3e350" Sep 30 13:55:41 crc kubenswrapper[4783]: E0930 13:55:41.932095 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1ed12a1abcc52f48cf0000931ebf5d559373be49d418f4dc5cb4bb23ac3e350\": container with ID starting with c1ed12a1abcc52f48cf0000931ebf5d559373be49d418f4dc5cb4bb23ac3e350 not found: ID does not exist" containerID="c1ed12a1abcc52f48cf0000931ebf5d559373be49d418f4dc5cb4bb23ac3e350" Sep 30 13:55:41 crc kubenswrapper[4783]: I0930 13:55:41.932116 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1ed12a1abcc52f48cf0000931ebf5d559373be49d418f4dc5cb4bb23ac3e350"} err="failed to get container status 
\"c1ed12a1abcc52f48cf0000931ebf5d559373be49d418f4dc5cb4bb23ac3e350\": rpc error: code = NotFound desc = could not find container \"c1ed12a1abcc52f48cf0000931ebf5d559373be49d418f4dc5cb4bb23ac3e350\": container with ID starting with c1ed12a1abcc52f48cf0000931ebf5d559373be49d418f4dc5cb4bb23ac3e350 not found: ID does not exist" Sep 30 13:55:42 crc kubenswrapper[4783]: I0930 13:55:42.164279 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c856dc5f9-xnnct"] Sep 30 13:55:42 crc kubenswrapper[4783]: I0930 13:55:42.177428 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c856dc5f9-xnnct"] Sep 30 13:55:42 crc kubenswrapper[4783]: I0930 13:55:42.828768 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b1dc1d2a-552d-4400-9d1b-12a3a051c432","Type":"ContainerStarted","Data":"d100cc0e53e2504d5d93fa913ed337f4d3bdd4130801738388f1ae3625b57276"} Sep 30 13:55:42 crc kubenswrapper[4783]: I0930 13:55:42.831752 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 13:55:42 crc kubenswrapper[4783]: I0930 13:55:42.832278 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 13:55:42 crc kubenswrapper[4783]: I0930 13:55:42.855843 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.855824225 podStartE2EDuration="4.855824225s" podCreationTimestamp="2025-09-30 13:55:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:55:42.854543163 +0000 UTC m=+1242.786009490" watchObservedRunningTime="2025-09-30 13:55:42.855824225 +0000 UTC m=+1242.787290532" Sep 30 13:55:42 crc kubenswrapper[4783]: I0930 13:55:42.908604 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" path="/var/lib/kubelet/pods/a0183b5a-e3e9-4689-ab48-ea5b73e0bd60/volumes" Sep 30 13:55:43 crc kubenswrapper[4783]: I0930 13:55:43.818534 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 13:55:43 crc kubenswrapper[4783]: I0930 13:55:43.818875 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 13:55:43 crc kubenswrapper[4783]: I0930 13:55:43.861750 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 13:55:43 crc kubenswrapper[4783]: I0930 13:55:43.862760 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 13:55:43 crc kubenswrapper[4783]: I0930 13:55:43.873391 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 13:55:43 crc kubenswrapper[4783]: I0930 13:55:43.982118 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.814933 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.866464 4783 generic.go:334] "Generic (PLEG): container finished" podID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerID="8388bd729deeb5a4bfa281f55b9492a4d2cbff5976c2d784aedefb4af166950c" exitCode=0 Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.867330 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.867944 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f","Type":"ContainerDied","Data":"8388bd729deeb5a4bfa281f55b9492a4d2cbff5976c2d784aedefb4af166950c"} Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.867972 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f","Type":"ContainerDied","Data":"46acadfd94b84d50c173d4b229b89edae4e39eff498fd6a6e10715f46e38eb46"} Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.867991 4783 scope.go:117] "RemoveContainer" containerID="33e25b770bd5a15b40960fc87a1c65415f2fa2f12e66216777be041239f94060" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.868077 4783 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.868086 4783 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.869184 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.882485 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g56j7\" (UniqueName: \"kubernetes.io/projected/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-kube-api-access-g56j7\") pod \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.882664 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-combined-ca-bundle\") pod \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.882741 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-scripts\") pod \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.882768 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-config-data\") pod \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.882896 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-log-httpd\") pod \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.882972 4783 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-run-httpd\") pod \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.883213 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-sg-core-conf-yaml\") pod \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\" (UID: \"0a37fb8f-6678-4a4f-be3f-41fc6ed2408f\") " Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.888495 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-scripts" (OuterVolumeSpecName: "scripts") pod "0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" (UID: "0a37fb8f-6678-4a4f-be3f-41fc6ed2408f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.888602 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" (UID: "0a37fb8f-6678-4a4f-be3f-41fc6ed2408f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.888742 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" (UID: "0a37fb8f-6678-4a4f-be3f-41fc6ed2408f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.896495 4783 scope.go:117] "RemoveContainer" containerID="5e5c4a417fd8d76b8331fd7b6ba5586dcadd42387e2b209d79826e330824ed0a" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.904338 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-kube-api-access-g56j7" (OuterVolumeSpecName: "kube-api-access-g56j7") pod "0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" (UID: "0a37fb8f-6678-4a4f-be3f-41fc6ed2408f"). InnerVolumeSpecName "kube-api-access-g56j7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.931804 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" (UID: "0a37fb8f-6678-4a4f-be3f-41fc6ed2408f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.963412 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" (UID: "0a37fb8f-6678-4a4f-be3f-41fc6ed2408f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.985120 4783 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.985147 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g56j7\" (UniqueName: \"kubernetes.io/projected/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-kube-api-access-g56j7\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.985160 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.985168 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.985176 4783 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:44 crc kubenswrapper[4783]: I0930 13:55:44.985185 4783 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.011529 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-config-data" (OuterVolumeSpecName: "config-data") pod "0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" (UID: "0a37fb8f-6678-4a4f-be3f-41fc6ed2408f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.014803 4783 scope.go:117] "RemoveContainer" containerID="9968fe2857b4dfc652fda1adff3a77973479c234e35c368054e4b8be62b1c5d3" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.033170 4783 scope.go:117] "RemoveContainer" containerID="8388bd729deeb5a4bfa281f55b9492a4d2cbff5976c2d784aedefb4af166950c" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.055399 4783 scope.go:117] "RemoveContainer" containerID="33e25b770bd5a15b40960fc87a1c65415f2fa2f12e66216777be041239f94060" Sep 30 13:55:45 crc kubenswrapper[4783]: E0930 13:55:45.056319 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33e25b770bd5a15b40960fc87a1c65415f2fa2f12e66216777be041239f94060\": container with ID starting with 33e25b770bd5a15b40960fc87a1c65415f2fa2f12e66216777be041239f94060 not found: ID does not exist" containerID="33e25b770bd5a15b40960fc87a1c65415f2fa2f12e66216777be041239f94060" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.056383 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33e25b770bd5a15b40960fc87a1c65415f2fa2f12e66216777be041239f94060"} err="failed to get container status \"33e25b770bd5a15b40960fc87a1c65415f2fa2f12e66216777be041239f94060\": rpc error: code = NotFound desc = could not find container \"33e25b770bd5a15b40960fc87a1c65415f2fa2f12e66216777be041239f94060\": container with ID starting with 33e25b770bd5a15b40960fc87a1c65415f2fa2f12e66216777be041239f94060 not found: ID does not exist" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.056418 4783 scope.go:117] "RemoveContainer" containerID="5e5c4a417fd8d76b8331fd7b6ba5586dcadd42387e2b209d79826e330824ed0a" Sep 30 13:55:45 crc kubenswrapper[4783]: E0930 13:55:45.056834 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e5c4a417fd8d76b8331fd7b6ba5586dcadd42387e2b209d79826e330824ed0a\": container with ID starting with 5e5c4a417fd8d76b8331fd7b6ba5586dcadd42387e2b209d79826e330824ed0a not found: ID does not exist" containerID="5e5c4a417fd8d76b8331fd7b6ba5586dcadd42387e2b209d79826e330824ed0a" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.056865 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e5c4a417fd8d76b8331fd7b6ba5586dcadd42387e2b209d79826e330824ed0a"} err="failed to get container status \"5e5c4a417fd8d76b8331fd7b6ba5586dcadd42387e2b209d79826e330824ed0a\": rpc error: code = NotFound desc = could not find container \"5e5c4a417fd8d76b8331fd7b6ba5586dcadd42387e2b209d79826e330824ed0a\": container with ID starting with 5e5c4a417fd8d76b8331fd7b6ba5586dcadd42387e2b209d79826e330824ed0a not found: ID does not exist" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.056886 4783 scope.go:117] "RemoveContainer" containerID="9968fe2857b4dfc652fda1adff3a77973479c234e35c368054e4b8be62b1c5d3" Sep 30 13:55:45 crc kubenswrapper[4783]: E0930 13:55:45.057850 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9968fe2857b4dfc652fda1adff3a77973479c234e35c368054e4b8be62b1c5d3\": container with ID starting with 9968fe2857b4dfc652fda1adff3a77973479c234e35c368054e4b8be62b1c5d3 not found: ID does not exist" containerID="9968fe2857b4dfc652fda1adff3a77973479c234e35c368054e4b8be62b1c5d3" Sep 30 13:55:45 crc 
kubenswrapper[4783]: I0930 13:55:45.058092 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9968fe2857b4dfc652fda1adff3a77973479c234e35c368054e4b8be62b1c5d3"} err="failed to get container status \"9968fe2857b4dfc652fda1adff3a77973479c234e35c368054e4b8be62b1c5d3\": rpc error: code = NotFound desc = could not find container \"9968fe2857b4dfc652fda1adff3a77973479c234e35c368054e4b8be62b1c5d3\": container with ID starting with 9968fe2857b4dfc652fda1adff3a77973479c234e35c368054e4b8be62b1c5d3 not found: ID does not exist" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.058325 4783 scope.go:117] "RemoveContainer" containerID="8388bd729deeb5a4bfa281f55b9492a4d2cbff5976c2d784aedefb4af166950c" Sep 30 13:55:45 crc kubenswrapper[4783]: E0930 13:55:45.059114 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8388bd729deeb5a4bfa281f55b9492a4d2cbff5976c2d784aedefb4af166950c\": container with ID starting with 8388bd729deeb5a4bfa281f55b9492a4d2cbff5976c2d784aedefb4af166950c not found: ID does not exist" containerID="8388bd729deeb5a4bfa281f55b9492a4d2cbff5976c2d784aedefb4af166950c" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.059154 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8388bd729deeb5a4bfa281f55b9492a4d2cbff5976c2d784aedefb4af166950c"} err="failed to get container status \"8388bd729deeb5a4bfa281f55b9492a4d2cbff5976c2d784aedefb4af166950c\": rpc error: code = NotFound desc = could not find container \"8388bd729deeb5a4bfa281f55b9492a4d2cbff5976c2d784aedefb4af166950c\": container with ID starting with 8388bd729deeb5a4bfa281f55b9492a4d2cbff5976c2d784aedefb4af166950c not found: ID does not exist" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.087633 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.124447 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.124964 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.258850 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.264554 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.297371 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:45 crc kubenswrapper[4783]: E0930 13:55:45.298015 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d9f067a-87ea-4ecf-8142-1e28d8d98574" containerName="mariadb-database-create" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298036 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d9f067a-87ea-4ecf-8142-1e28d8d98574" containerName="mariadb-database-create" Sep 30 13:55:45 crc kubenswrapper[4783]: E0930 13:55:45.298066 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" containerName="init" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298074 4783 
state_mem.go:107] "Deleted CPUSet assignment" podUID="a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" containerName="init" Sep 30 13:55:45 crc kubenswrapper[4783]: E0930 13:55:45.298086 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" containerName="dnsmasq-dns" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298093 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" containerName="dnsmasq-dns" Sep 30 13:55:45 crc kubenswrapper[4783]: E0930 13:55:45.298106 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="sg-core" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298113 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="sg-core" Sep 30 13:55:45 crc kubenswrapper[4783]: E0930 13:55:45.298126 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="proxy-httpd" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298134 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="proxy-httpd" Sep 30 13:55:45 crc kubenswrapper[4783]: E0930 13:55:45.298149 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="ceilometer-notification-agent" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298155 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="ceilometer-notification-agent" Sep 30 13:55:45 crc kubenswrapper[4783]: E0930 13:55:45.298165 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="ceilometer-central-agent" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298171 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="ceilometer-central-agent" Sep 30 13:55:45 crc kubenswrapper[4783]: E0930 13:55:45.298192 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea" containerName="mariadb-database-create" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298200 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea" containerName="mariadb-database-create" Sep 30 13:55:45 crc kubenswrapper[4783]: E0930 13:55:45.298251 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec598217-7641-433b-938d-e2740a05a9e1" containerName="mariadb-database-create" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298257 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec598217-7641-433b-938d-e2740a05a9e1" containerName="mariadb-database-create" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298419 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="ceilometer-notification-agent" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298432 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="sg-core" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298440 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea" 
containerName="mariadb-database-create" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298449 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="ceilometer-central-agent" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298458 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec598217-7641-433b-938d-e2740a05a9e1" containerName="mariadb-database-create" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298465 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d9f067a-87ea-4ecf-8142-1e28d8d98574" containerName="mariadb-database-create" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298477 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" containerName="proxy-httpd" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.298498 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" containerName="dnsmasq-dns" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.300108 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.304532 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.308443 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.313997 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.396463 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6a9459cd-c173-4d16-b3f0-0a8169a97290-run-httpd\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.396497 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-config-data\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.396537 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6a9459cd-c173-4d16-b3f0-0a8169a97290-log-httpd\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.396554 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.396575 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-scripts\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " 
pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.396631 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7hxd\" (UniqueName: \"kubernetes.io/projected/6a9459cd-c173-4d16-b3f0-0a8169a97290-kube-api-access-d7hxd\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.396654 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.482371 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-0854-account-create-4rpvb"] Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.483559 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0854-account-create-4rpvb" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.485567 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.498504 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7hxd\" (UniqueName: \"kubernetes.io/projected/6a9459cd-c173-4d16-b3f0-0a8169a97290-kube-api-access-d7hxd\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.498569 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.498616 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvsrv\" (UniqueName: \"kubernetes.io/projected/ebb849f0-877d-4dc6-a589-9ae099eaac5e-kube-api-access-kvsrv\") pod \"nova-cell0-0854-account-create-4rpvb\" (UID: \"ebb849f0-877d-4dc6-a589-9ae099eaac5e\") " pod="openstack/nova-cell0-0854-account-create-4rpvb" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.498651 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6a9459cd-c173-4d16-b3f0-0a8169a97290-run-httpd\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.498669 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-config-data\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.498706 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6a9459cd-c173-4d16-b3f0-0a8169a97290-log-httpd\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc 
kubenswrapper[4783]: I0930 13:55:45.498724 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.498745 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-scripts\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.500668 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6a9459cd-c173-4d16-b3f0-0a8169a97290-log-httpd\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.500752 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6a9459cd-c173-4d16-b3f0-0a8169a97290-run-httpd\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.503690 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.503768 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-scripts\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.506823 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-config-data\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.506898 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.519846 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7hxd\" (UniqueName: \"kubernetes.io/projected/6a9459cd-c173-4d16-b3f0-0a8169a97290-kube-api-access-d7hxd\") pod \"ceilometer-0\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.529657 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-0854-account-create-4rpvb"] Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.599407 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvsrv\" (UniqueName: \"kubernetes.io/projected/ebb849f0-877d-4dc6-a589-9ae099eaac5e-kube-api-access-kvsrv\") pod 
\"nova-cell0-0854-account-create-4rpvb\" (UID: \"ebb849f0-877d-4dc6-a589-9ae099eaac5e\") " pod="openstack/nova-cell0-0854-account-create-4rpvb" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.616691 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvsrv\" (UniqueName: \"kubernetes.io/projected/ebb849f0-877d-4dc6-a589-9ae099eaac5e-kube-api-access-kvsrv\") pod \"nova-cell0-0854-account-create-4rpvb\" (UID: \"ebb849f0-877d-4dc6-a589-9ae099eaac5e\") " pod="openstack/nova-cell0-0854-account-create-4rpvb" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.629955 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.749447 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0854-account-create-4rpvb" Sep 30 13:55:45 crc kubenswrapper[4783]: I0930 13:55:45.887047 4783 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 13:55:46 crc kubenswrapper[4783]: I0930 13:55:46.163015 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:46 crc kubenswrapper[4783]: W0930 13:55:46.168328 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a9459cd_c173_4d16_b3f0_0a8169a97290.slice/crio-0571daa5d5d3dae2ebc2258c40ea188394f984042f855b08b7215b40d401d032 WatchSource:0}: Error finding container 0571daa5d5d3dae2ebc2258c40ea188394f984042f855b08b7215b40d401d032: Status 404 returned error can't find the container with id 0571daa5d5d3dae2ebc2258c40ea188394f984042f855b08b7215b40d401d032 Sep 30 13:55:46 crc kubenswrapper[4783]: I0930 13:55:46.269988 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c856dc5f9-xnnct" podUID="a0183b5a-e3e9-4689-ab48-ea5b73e0bd60" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.154:5353: i/o timeout" Sep 30 13:55:46 crc kubenswrapper[4783]: I0930 13:55:46.295551 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-0854-account-create-4rpvb"] Sep 30 13:55:46 crc kubenswrapper[4783]: I0930 13:55:46.332481 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 13:55:46 crc kubenswrapper[4783]: I0930 13:55:46.333271 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 13:55:46 crc kubenswrapper[4783]: I0930 13:55:46.855900 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a37fb8f-6678-4a4f-be3f-41fc6ed2408f" path="/var/lib/kubelet/pods/0a37fb8f-6678-4a4f-be3f-41fc6ed2408f/volumes" Sep 30 13:55:46 crc kubenswrapper[4783]: I0930 13:55:46.895534 4783 generic.go:334] "Generic (PLEG): container finished" podID="ebb849f0-877d-4dc6-a589-9ae099eaac5e" containerID="dd4f66a67347ded33053c0941830fe501acc0e03ce96f7d2ca1c96907ddd4983" exitCode=0 Sep 30 13:55:46 crc kubenswrapper[4783]: I0930 13:55:46.895592 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0854-account-create-4rpvb" event={"ID":"ebb849f0-877d-4dc6-a589-9ae099eaac5e","Type":"ContainerDied","Data":"dd4f66a67347ded33053c0941830fe501acc0e03ce96f7d2ca1c96907ddd4983"} Sep 30 13:55:46 crc kubenswrapper[4783]: I0930 13:55:46.895615 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-0854-account-create-4rpvb" event={"ID":"ebb849f0-877d-4dc6-a589-9ae099eaac5e","Type":"ContainerStarted","Data":"391f9337285a8db2b529af39865ee75e4606c7a0e41640515b3d5e0b7da5659b"} Sep 30 13:55:46 crc kubenswrapper[4783]: I0930 13:55:46.896770 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6a9459cd-c173-4d16-b3f0-0a8169a97290","Type":"ContainerStarted","Data":"0571daa5d5d3dae2ebc2258c40ea188394f984042f855b08b7215b40d401d032"} Sep 30 13:55:47 crc kubenswrapper[4783]: I0930 13:55:47.911132 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6a9459cd-c173-4d16-b3f0-0a8169a97290","Type":"ContainerStarted","Data":"59fb5843e88a8d0473293abcbd7df11b4d3a891f25b112ad89c3afdfa2f0b80f"} Sep 30 13:55:48 crc kubenswrapper[4783]: I0930 13:55:48.324195 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0854-account-create-4rpvb" Sep 30 13:55:48 crc kubenswrapper[4783]: I0930 13:55:48.465865 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvsrv\" (UniqueName: \"kubernetes.io/projected/ebb849f0-877d-4dc6-a589-9ae099eaac5e-kube-api-access-kvsrv\") pod \"ebb849f0-877d-4dc6-a589-9ae099eaac5e\" (UID: \"ebb849f0-877d-4dc6-a589-9ae099eaac5e\") " Sep 30 13:55:48 crc kubenswrapper[4783]: I0930 13:55:48.471562 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebb849f0-877d-4dc6-a589-9ae099eaac5e-kube-api-access-kvsrv" (OuterVolumeSpecName: "kube-api-access-kvsrv") pod "ebb849f0-877d-4dc6-a589-9ae099eaac5e" (UID: "ebb849f0-877d-4dc6-a589-9ae099eaac5e"). InnerVolumeSpecName "kube-api-access-kvsrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:55:48 crc kubenswrapper[4783]: I0930 13:55:48.568108 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvsrv\" (UniqueName: \"kubernetes.io/projected/ebb849f0-877d-4dc6-a589-9ae099eaac5e-kube-api-access-kvsrv\") on node \"crc\" DevicePath \"\"" Sep 30 13:55:48 crc kubenswrapper[4783]: I0930 13:55:48.920849 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6a9459cd-c173-4d16-b3f0-0a8169a97290","Type":"ContainerStarted","Data":"61ed2802b332bc985b87c1b28a78873d2729b51598aa9d034d16550c9bab9ad4"} Sep 30 13:55:48 crc kubenswrapper[4783]: I0930 13:55:48.922718 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0854-account-create-4rpvb" event={"ID":"ebb849f0-877d-4dc6-a589-9ae099eaac5e","Type":"ContainerDied","Data":"391f9337285a8db2b529af39865ee75e4606c7a0e41640515b3d5e0b7da5659b"} Sep 30 13:55:48 crc kubenswrapper[4783]: I0930 13:55:48.922740 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-0854-account-create-4rpvb" Sep 30 13:55:48 crc kubenswrapper[4783]: I0930 13:55:48.922812 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="391f9337285a8db2b529af39865ee75e4606c7a0e41640515b3d5e0b7da5659b" Sep 30 13:55:49 crc kubenswrapper[4783]: I0930 13:55:49.181040 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 13:55:49 crc kubenswrapper[4783]: I0930 13:55:49.937814 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6a9459cd-c173-4d16-b3f0-0a8169a97290","Type":"ContainerStarted","Data":"df1fc50ced5f6f68fdfaffc104690ef44d70fac76c5ae848f497044022ece357"} Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.737502 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-67qfh"] Sep 30 13:55:50 crc kubenswrapper[4783]: E0930 13:55:50.738058 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebb849f0-877d-4dc6-a589-9ae099eaac5e" containerName="mariadb-account-create" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.738072 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb849f0-877d-4dc6-a589-9ae099eaac5e" containerName="mariadb-account-create" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.738278 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebb849f0-877d-4dc6-a589-9ae099eaac5e" containerName="mariadb-account-create" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.738793 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.740518 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.741868 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.742178 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-rstfl" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.770836 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-67qfh"] Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.803968 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-67qfh\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.804047 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qnrl\" (UniqueName: \"kubernetes.io/projected/abf9589b-c83a-44a3-943f-97739903c659-kube-api-access-9qnrl\") pod \"nova-cell0-conductor-db-sync-67qfh\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.804101 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-scripts\") 
pod \"nova-cell0-conductor-db-sync-67qfh\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.804130 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-config-data\") pod \"nova-cell0-conductor-db-sync-67qfh\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.906054 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-67qfh\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.906217 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qnrl\" (UniqueName: \"kubernetes.io/projected/abf9589b-c83a-44a3-943f-97739903c659-kube-api-access-9qnrl\") pod \"nova-cell0-conductor-db-sync-67qfh\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.906367 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-scripts\") pod \"nova-cell0-conductor-db-sync-67qfh\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.906400 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-config-data\") pod \"nova-cell0-conductor-db-sync-67qfh\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.910871 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-67qfh\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.911697 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-scripts\") pod \"nova-cell0-conductor-db-sync-67qfh\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.913856 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-config-data\") pod \"nova-cell0-conductor-db-sync-67qfh\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.926738 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qnrl\" (UniqueName: 
\"kubernetes.io/projected/abf9589b-c83a-44a3-943f-97739903c659-kube-api-access-9qnrl\") pod \"nova-cell0-conductor-db-sync-67qfh\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.952980 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6a9459cd-c173-4d16-b3f0-0a8169a97290","Type":"ContainerStarted","Data":"31bfe3ba4d5ed2336157dbaeae23b2d50af31aaf1acd49a5e31db7e5406886dd"} Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.953258 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 13:55:50 crc kubenswrapper[4783]: I0930 13:55:50.984943 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.5666669720000002 podStartE2EDuration="5.984924958s" podCreationTimestamp="2025-09-30 13:55:45 +0000 UTC" firstStartedPulling="2025-09-30 13:55:46.170854296 +0000 UTC m=+1246.102320603" lastFinishedPulling="2025-09-30 13:55:50.589112282 +0000 UTC m=+1250.520578589" observedRunningTime="2025-09-30 13:55:50.97810682 +0000 UTC m=+1250.909573127" watchObservedRunningTime="2025-09-30 13:55:50.984924958 +0000 UTC m=+1250.916391255" Sep 30 13:55:51 crc kubenswrapper[4783]: I0930 13:55:51.088156 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:55:51 crc kubenswrapper[4783]: W0930 13:55:51.583933 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabf9589b_c83a_44a3_943f_97739903c659.slice/crio-27744ad0f04426e16ec6428be8507f13184fddb3a6ecc0f848208bdd18184e4e WatchSource:0}: Error finding container 27744ad0f04426e16ec6428be8507f13184fddb3a6ecc0f848208bdd18184e4e: Status 404 returned error can't find the container with id 27744ad0f04426e16ec6428be8507f13184fddb3a6ecc0f848208bdd18184e4e Sep 30 13:55:51 crc kubenswrapper[4783]: I0930 13:55:51.590481 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-67qfh"] Sep 30 13:55:51 crc kubenswrapper[4783]: I0930 13:55:51.964930 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-67qfh" event={"ID":"abf9589b-c83a-44a3-943f-97739903c659","Type":"ContainerStarted","Data":"27744ad0f04426e16ec6428be8507f13184fddb3a6ecc0f848208bdd18184e4e"} Sep 30 13:55:54 crc kubenswrapper[4783]: I0930 13:55:54.428934 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:55:54 crc kubenswrapper[4783]: I0930 13:55:54.429541 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="ceilometer-central-agent" containerID="cri-o://59fb5843e88a8d0473293abcbd7df11b4d3a891f25b112ad89c3afdfa2f0b80f" gracePeriod=30 Sep 30 13:55:54 crc kubenswrapper[4783]: I0930 13:55:54.429982 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="proxy-httpd" containerID="cri-o://31bfe3ba4d5ed2336157dbaeae23b2d50af31aaf1acd49a5e31db7e5406886dd" gracePeriod=30 Sep 30 13:55:54 crc kubenswrapper[4783]: I0930 13:55:54.430041 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="sg-core" containerID="cri-o://df1fc50ced5f6f68fdfaffc104690ef44d70fac76c5ae848f497044022ece357" gracePeriod=30 Sep 30 13:55:54 crc kubenswrapper[4783]: I0930 13:55:54.430084 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="ceilometer-notification-agent" containerID="cri-o://61ed2802b332bc985b87c1b28a78873d2729b51598aa9d034d16550c9bab9ad4" gracePeriod=30 Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.015725 4783 generic.go:334] "Generic (PLEG): container finished" podID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerID="31bfe3ba4d5ed2336157dbaeae23b2d50af31aaf1acd49a5e31db7e5406886dd" exitCode=0 Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.015760 4783 generic.go:334] "Generic (PLEG): container finished" podID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerID="df1fc50ced5f6f68fdfaffc104690ef44d70fac76c5ae848f497044022ece357" exitCode=2 Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.015770 4783 generic.go:334] "Generic (PLEG): container finished" podID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerID="61ed2802b332bc985b87c1b28a78873d2729b51598aa9d034d16550c9bab9ad4" exitCode=0 Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.015779 4783 generic.go:334] "Generic (PLEG): container finished" podID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerID="59fb5843e88a8d0473293abcbd7df11b4d3a891f25b112ad89c3afdfa2f0b80f" exitCode=0 Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.015806 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6a9459cd-c173-4d16-b3f0-0a8169a97290","Type":"ContainerDied","Data":"31bfe3ba4d5ed2336157dbaeae23b2d50af31aaf1acd49a5e31db7e5406886dd"} Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.015844 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6a9459cd-c173-4d16-b3f0-0a8169a97290","Type":"ContainerDied","Data":"df1fc50ced5f6f68fdfaffc104690ef44d70fac76c5ae848f497044022ece357"} Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.015858 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6a9459cd-c173-4d16-b3f0-0a8169a97290","Type":"ContainerDied","Data":"61ed2802b332bc985b87c1b28a78873d2729b51598aa9d034d16550c9bab9ad4"} Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.015868 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6a9459cd-c173-4d16-b3f0-0a8169a97290","Type":"ContainerDied","Data":"59fb5843e88a8d0473293abcbd7df11b4d3a891f25b112ad89c3afdfa2f0b80f"} Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.410014 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-1d76-account-create-qvfvc"] Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.411450 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-1d76-account-create-qvfvc" Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.413133 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.463125 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1d76-account-create-qvfvc"] Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.505897 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqdr5\" (UniqueName: \"kubernetes.io/projected/c989ff95-97ef-4a67-af97-0359d59c5392-kube-api-access-xqdr5\") pod \"nova-api-1d76-account-create-qvfvc\" (UID: \"c989ff95-97ef-4a67-af97-0359d59c5392\") " pod="openstack/nova-api-1d76-account-create-qvfvc" Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.607502 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqdr5\" (UniqueName: \"kubernetes.io/projected/c989ff95-97ef-4a67-af97-0359d59c5392-kube-api-access-xqdr5\") pod \"nova-api-1d76-account-create-qvfvc\" (UID: \"c989ff95-97ef-4a67-af97-0359d59c5392\") " pod="openstack/nova-api-1d76-account-create-qvfvc" Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.620658 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-2a43-account-create-hjmwp"] Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.621817 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2a43-account-create-hjmwp" Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.625141 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.628389 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqdr5\" (UniqueName: \"kubernetes.io/projected/c989ff95-97ef-4a67-af97-0359d59c5392-kube-api-access-xqdr5\") pod \"nova-api-1d76-account-create-qvfvc\" (UID: \"c989ff95-97ef-4a67-af97-0359d59c5392\") " pod="openstack/nova-api-1d76-account-create-qvfvc" Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.630489 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-2a43-account-create-hjmwp"] Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.709795 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gzvm\" (UniqueName: \"kubernetes.io/projected/8655922a-c182-49c1-aa15-6f47bd279990-kube-api-access-6gzvm\") pod \"nova-cell1-2a43-account-create-hjmwp\" (UID: \"8655922a-c182-49c1-aa15-6f47bd279990\") " pod="openstack/nova-cell1-2a43-account-create-hjmwp" Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.737781 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-1d76-account-create-qvfvc" Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.812351 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gzvm\" (UniqueName: \"kubernetes.io/projected/8655922a-c182-49c1-aa15-6f47bd279990-kube-api-access-6gzvm\") pod \"nova-cell1-2a43-account-create-hjmwp\" (UID: \"8655922a-c182-49c1-aa15-6f47bd279990\") " pod="openstack/nova-cell1-2a43-account-create-hjmwp" Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.840546 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gzvm\" (UniqueName: \"kubernetes.io/projected/8655922a-c182-49c1-aa15-6f47bd279990-kube-api-access-6gzvm\") pod \"nova-cell1-2a43-account-create-hjmwp\" (UID: \"8655922a-c182-49c1-aa15-6f47bd279990\") " pod="openstack/nova-cell1-2a43-account-create-hjmwp" Sep 30 13:55:55 crc kubenswrapper[4783]: I0930 13:55:55.985069 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2a43-account-create-hjmwp" Sep 30 13:55:59 crc kubenswrapper[4783]: I0930 13:55:59.949002 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1d76-account-create-qvfvc"] Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.060636 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1d76-account-create-qvfvc" event={"ID":"c989ff95-97ef-4a67-af97-0359d59c5392","Type":"ContainerStarted","Data":"e834d9b39b5be11165f2e24f8b6ca2d0e0c883a3efce7eb5689527376f5e2b58"} Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.063088 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6a9459cd-c173-4d16-b3f0-0a8169a97290","Type":"ContainerDied","Data":"0571daa5d5d3dae2ebc2258c40ea188394f984042f855b08b7215b40d401d032"} Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.063113 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0571daa5d5d3dae2ebc2258c40ea188394f984042f855b08b7215b40d401d032" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.154356 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.192863 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6a9459cd-c173-4d16-b3f0-0a8169a97290-run-httpd\") pod \"6a9459cd-c173-4d16-b3f0-0a8169a97290\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.192915 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-combined-ca-bundle\") pod \"6a9459cd-c173-4d16-b3f0-0a8169a97290\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.193048 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-sg-core-conf-yaml\") pod \"6a9459cd-c173-4d16-b3f0-0a8169a97290\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.193126 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-scripts\") pod \"6a9459cd-c173-4d16-b3f0-0a8169a97290\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.193194 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-config-data\") pod \"6a9459cd-c173-4d16-b3f0-0a8169a97290\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.193278 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6a9459cd-c173-4d16-b3f0-0a8169a97290-log-httpd\") pod \"6a9459cd-c173-4d16-b3f0-0a8169a97290\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.193380 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7hxd\" (UniqueName: \"kubernetes.io/projected/6a9459cd-c173-4d16-b3f0-0a8169a97290-kube-api-access-d7hxd\") pod \"6a9459cd-c173-4d16-b3f0-0a8169a97290\" (UID: \"6a9459cd-c173-4d16-b3f0-0a8169a97290\") " Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.193926 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a9459cd-c173-4d16-b3f0-0a8169a97290-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6a9459cd-c173-4d16-b3f0-0a8169a97290" (UID: "6a9459cd-c173-4d16-b3f0-0a8169a97290"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.196973 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a9459cd-c173-4d16-b3f0-0a8169a97290-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6a9459cd-c173-4d16-b3f0-0a8169a97290" (UID: "6a9459cd-c173-4d16-b3f0-0a8169a97290"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.197940 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a9459cd-c173-4d16-b3f0-0a8169a97290-kube-api-access-d7hxd" (OuterVolumeSpecName: "kube-api-access-d7hxd") pod "6a9459cd-c173-4d16-b3f0-0a8169a97290" (UID: "6a9459cd-c173-4d16-b3f0-0a8169a97290"). InnerVolumeSpecName "kube-api-access-d7hxd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.202060 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-scripts" (OuterVolumeSpecName: "scripts") pod "6a9459cd-c173-4d16-b3f0-0a8169a97290" (UID: "6a9459cd-c173-4d16-b3f0-0a8169a97290"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.228483 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6a9459cd-c173-4d16-b3f0-0a8169a97290" (UID: "6a9459cd-c173-4d16-b3f0-0a8169a97290"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.261462 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-2a43-account-create-hjmwp"] Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.295794 4783 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6a9459cd-c173-4d16-b3f0-0a8169a97290-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.295842 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7hxd\" (UniqueName: \"kubernetes.io/projected/6a9459cd-c173-4d16-b3f0-0a8169a97290-kube-api-access-d7hxd\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.295860 4783 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6a9459cd-c173-4d16-b3f0-0a8169a97290-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.295872 4783 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.295883 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.318794 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6a9459cd-c173-4d16-b3f0-0a8169a97290" (UID: "6a9459cd-c173-4d16-b3f0-0a8169a97290"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.337710 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-config-data" (OuterVolumeSpecName: "config-data") pod "6a9459cd-c173-4d16-b3f0-0a8169a97290" (UID: "6a9459cd-c173-4d16-b3f0-0a8169a97290"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.399669 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.399705 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a9459cd-c173-4d16-b3f0-0a8169a97290-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:00 crc kubenswrapper[4783]: I0930 13:56:00.899842 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.074438 4783 generic.go:334] "Generic (PLEG): container finished" podID="8655922a-c182-49c1-aa15-6f47bd279990" containerID="8a9e4d510cc8a81c1806a28e9179a238fbbf2ed19d8dc5cb74a39c6802f3a6c0" exitCode=0 Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.074501 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2a43-account-create-hjmwp" event={"ID":"8655922a-c182-49c1-aa15-6f47bd279990","Type":"ContainerDied","Data":"8a9e4d510cc8a81c1806a28e9179a238fbbf2ed19d8dc5cb74a39c6802f3a6c0"} Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.074527 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2a43-account-create-hjmwp" event={"ID":"8655922a-c182-49c1-aa15-6f47bd279990","Type":"ContainerStarted","Data":"c178f3c2c52dead5de3687b1caf342e6c59b75d49652b72279a50b6ab7e68ebd"} Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.077116 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-67qfh" event={"ID":"abf9589b-c83a-44a3-943f-97739903c659","Type":"ContainerStarted","Data":"1c6d456af6501da0a87be7ace5190030c2228ba920c68f561c9e14c6789bd6e6"} Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.080154 4783 generic.go:334] "Generic (PLEG): container finished" podID="c989ff95-97ef-4a67-af97-0359d59c5392" containerID="d1e1e216a0a4b5e90fb3243099d733cadcc1f3c31850aa187e3b905cce41f140" exitCode=0 Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.080254 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.080923 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1d76-account-create-qvfvc" event={"ID":"c989ff95-97ef-4a67-af97-0359d59c5392","Type":"ContainerDied","Data":"d1e1e216a0a4b5e90fb3243099d733cadcc1f3c31850aa187e3b905cce41f140"} Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.126642 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-67qfh" podStartSLOduration=2.907504903 podStartE2EDuration="11.126613582s" podCreationTimestamp="2025-09-30 13:55:50 +0000 UTC" firstStartedPulling="2025-09-30 13:55:51.586192718 +0000 UTC m=+1251.517659025" lastFinishedPulling="2025-09-30 13:55:59.805301387 +0000 UTC m=+1259.736767704" observedRunningTime="2025-09-30 13:56:01.121321653 +0000 UTC m=+1261.052787960" watchObservedRunningTime="2025-09-30 13:56:01.126613582 +0000 UTC m=+1261.058079899" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.159523 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.167471 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.183871 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:56:01 crc kubenswrapper[4783]: E0930 13:56:01.184290 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="proxy-httpd" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.184309 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="proxy-httpd" Sep 30 13:56:01 crc kubenswrapper[4783]: E0930 13:56:01.184337 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="ceilometer-notification-agent" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.184346 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="ceilometer-notification-agent" Sep 30 13:56:01 crc kubenswrapper[4783]: E0930 13:56:01.184365 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="sg-core" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.184371 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="sg-core" Sep 30 13:56:01 crc kubenswrapper[4783]: E0930 13:56:01.184393 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="ceilometer-central-agent" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.184399 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="ceilometer-central-agent" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.184560 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="ceilometer-notification-agent" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.184579 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="ceilometer-central-agent" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.184589 4783 
memory_manager.go:354] "RemoveStaleState removing state" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="sg-core" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.184602 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" containerName="proxy-httpd" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.187215 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.189094 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.189175 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.193185 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.214701 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d104d752-45e0-4a40-94e0-12366cbd1327-run-httpd\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.214763 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-config-data\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.214788 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.214818 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.214847 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-scripts\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.214865 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2lhl\" (UniqueName: \"kubernetes.io/projected/d104d752-45e0-4a40-94e0-12366cbd1327-kube-api-access-b2lhl\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.214884 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d104d752-45e0-4a40-94e0-12366cbd1327-log-httpd\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " 
pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.316203 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d104d752-45e0-4a40-94e0-12366cbd1327-run-httpd\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.316270 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-config-data\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.316302 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.316335 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.316364 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-scripts\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.316380 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2lhl\" (UniqueName: \"kubernetes.io/projected/d104d752-45e0-4a40-94e0-12366cbd1327-kube-api-access-b2lhl\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.316396 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d104d752-45e0-4a40-94e0-12366cbd1327-log-httpd\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.317158 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d104d752-45e0-4a40-94e0-12366cbd1327-run-httpd\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.317309 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d104d752-45e0-4a40-94e0-12366cbd1327-log-httpd\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.322835 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 
crc kubenswrapper[4783]: I0930 13:56:01.323862 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-scripts\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.329734 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.330459 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-config-data\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.332412 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2lhl\" (UniqueName: \"kubernetes.io/projected/d104d752-45e0-4a40-94e0-12366cbd1327-kube-api-access-b2lhl\") pod \"ceilometer-0\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.503532 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:56:01 crc kubenswrapper[4783]: W0930 13:56:01.985000 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd104d752_45e0_4a40_94e0_12366cbd1327.slice/crio-fc20c88679e764b942adb45e7d73289fbd19f49ad60261bd693a14e0e07630a1 WatchSource:0}: Error finding container fc20c88679e764b942adb45e7d73289fbd19f49ad60261bd693a14e0e07630a1: Status 404 returned error can't find the container with id fc20c88679e764b942adb45e7d73289fbd19f49ad60261bd693a14e0e07630a1 Sep 30 13:56:01 crc kubenswrapper[4783]: I0930 13:56:01.989424 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:56:02 crc kubenswrapper[4783]: I0930 13:56:02.097285 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d104d752-45e0-4a40-94e0-12366cbd1327","Type":"ContainerStarted","Data":"fc20c88679e764b942adb45e7d73289fbd19f49ad60261bd693a14e0e07630a1"} Sep 30 13:56:02 crc kubenswrapper[4783]: I0930 13:56:02.561311 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2a43-account-create-hjmwp" Sep 30 13:56:02 crc kubenswrapper[4783]: I0930 13:56:02.566471 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-1d76-account-create-qvfvc" Sep 30 13:56:02 crc kubenswrapper[4783]: I0930 13:56:02.642479 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gzvm\" (UniqueName: \"kubernetes.io/projected/8655922a-c182-49c1-aa15-6f47bd279990-kube-api-access-6gzvm\") pod \"8655922a-c182-49c1-aa15-6f47bd279990\" (UID: \"8655922a-c182-49c1-aa15-6f47bd279990\") " Sep 30 13:56:02 crc kubenswrapper[4783]: I0930 13:56:02.642718 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqdr5\" (UniqueName: \"kubernetes.io/projected/c989ff95-97ef-4a67-af97-0359d59c5392-kube-api-access-xqdr5\") pod \"c989ff95-97ef-4a67-af97-0359d59c5392\" (UID: \"c989ff95-97ef-4a67-af97-0359d59c5392\") " Sep 30 13:56:02 crc kubenswrapper[4783]: I0930 13:56:02.647412 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c989ff95-97ef-4a67-af97-0359d59c5392-kube-api-access-xqdr5" (OuterVolumeSpecName: "kube-api-access-xqdr5") pod "c989ff95-97ef-4a67-af97-0359d59c5392" (UID: "c989ff95-97ef-4a67-af97-0359d59c5392"). InnerVolumeSpecName "kube-api-access-xqdr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:56:02 crc kubenswrapper[4783]: I0930 13:56:02.647479 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8655922a-c182-49c1-aa15-6f47bd279990-kube-api-access-6gzvm" (OuterVolumeSpecName: "kube-api-access-6gzvm") pod "8655922a-c182-49c1-aa15-6f47bd279990" (UID: "8655922a-c182-49c1-aa15-6f47bd279990"). InnerVolumeSpecName "kube-api-access-6gzvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:56:02 crc kubenswrapper[4783]: I0930 13:56:02.744570 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqdr5\" (UniqueName: \"kubernetes.io/projected/c989ff95-97ef-4a67-af97-0359d59c5392-kube-api-access-xqdr5\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:02 crc kubenswrapper[4783]: I0930 13:56:02.744602 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gzvm\" (UniqueName: \"kubernetes.io/projected/8655922a-c182-49c1-aa15-6f47bd279990-kube-api-access-6gzvm\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:02 crc kubenswrapper[4783]: I0930 13:56:02.854053 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a9459cd-c173-4d16-b3f0-0a8169a97290" path="/var/lib/kubelet/pods/6a9459cd-c173-4d16-b3f0-0a8169a97290/volumes" Sep 30 13:56:03 crc kubenswrapper[4783]: I0930 13:56:03.109570 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1d76-account-create-qvfvc" event={"ID":"c989ff95-97ef-4a67-af97-0359d59c5392","Type":"ContainerDied","Data":"e834d9b39b5be11165f2e24f8b6ca2d0e0c883a3efce7eb5689527376f5e2b58"} Sep 30 13:56:03 crc kubenswrapper[4783]: I0930 13:56:03.109824 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e834d9b39b5be11165f2e24f8b6ca2d0e0c883a3efce7eb5689527376f5e2b58" Sep 30 13:56:03 crc kubenswrapper[4783]: I0930 13:56:03.109597 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-1d76-account-create-qvfvc" Sep 30 13:56:03 crc kubenswrapper[4783]: I0930 13:56:03.111941 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d104d752-45e0-4a40-94e0-12366cbd1327","Type":"ContainerStarted","Data":"54661fa5491fd16ae834e43ce4d28ec1d857ce9b14702f8bd4ac285c5ab828f0"} Sep 30 13:56:03 crc kubenswrapper[4783]: I0930 13:56:03.114749 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2a43-account-create-hjmwp" event={"ID":"8655922a-c182-49c1-aa15-6f47bd279990","Type":"ContainerDied","Data":"c178f3c2c52dead5de3687b1caf342e6c59b75d49652b72279a50b6ab7e68ebd"} Sep 30 13:56:03 crc kubenswrapper[4783]: I0930 13:56:03.114791 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c178f3c2c52dead5de3687b1caf342e6c59b75d49652b72279a50b6ab7e68ebd" Sep 30 13:56:03 crc kubenswrapper[4783]: I0930 13:56:03.114796 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2a43-account-create-hjmwp" Sep 30 13:56:04 crc kubenswrapper[4783]: I0930 13:56:04.125528 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d104d752-45e0-4a40-94e0-12366cbd1327","Type":"ContainerStarted","Data":"7ca69fbe1567759db94a87e797c95b12e88682df905c30a0602c6c57d4051165"} Sep 30 13:56:04 crc kubenswrapper[4783]: I0930 13:56:04.241945 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:56:04 crc kubenswrapper[4783]: I0930 13:56:04.304781 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5d5b9bf7c6-787zm"] Sep 30 13:56:04 crc kubenswrapper[4783]: I0930 13:56:04.305046 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5d5b9bf7c6-787zm" podUID="43d0d755-e680-4fd1-937f-bf1b03c82289" containerName="neutron-api" containerID="cri-o://60ebd235fc76c16ed44f02b3ce1ffc6e69b72508f0544269c7cc23bd33db8049" gracePeriod=30 Sep 30 13:56:04 crc kubenswrapper[4783]: I0930 13:56:04.305203 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5d5b9bf7c6-787zm" podUID="43d0d755-e680-4fd1-937f-bf1b03c82289" containerName="neutron-httpd" containerID="cri-o://0146ef3582b3f46f0018759ff04f24fa8d7c3428f688adbf7c784affe0d1a231" gracePeriod=30 Sep 30 13:56:05 crc kubenswrapper[4783]: I0930 13:56:05.153968 4783 generic.go:334] "Generic (PLEG): container finished" podID="43d0d755-e680-4fd1-937f-bf1b03c82289" containerID="0146ef3582b3f46f0018759ff04f24fa8d7c3428f688adbf7c784affe0d1a231" exitCode=0 Sep 30 13:56:05 crc kubenswrapper[4783]: I0930 13:56:05.155136 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5d5b9bf7c6-787zm" event={"ID":"43d0d755-e680-4fd1-937f-bf1b03c82289","Type":"ContainerDied","Data":"0146ef3582b3f46f0018759ff04f24fa8d7c3428f688adbf7c784affe0d1a231"} Sep 30 13:56:12 crc kubenswrapper[4783]: I0930 13:56:12.245789 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d104d752-45e0-4a40-94e0-12366cbd1327","Type":"ContainerStarted","Data":"d2a528d9d102943b1ca191d8efe67b812216952209bf66bddcaad37773d7b6fe"} Sep 30 13:56:20 crc kubenswrapper[4783]: I0930 13:56:20.344344 4783 generic.go:334] "Generic (PLEG): container finished" podID="43d0d755-e680-4fd1-937f-bf1b03c82289" 
containerID="60ebd235fc76c16ed44f02b3ce1ffc6e69b72508f0544269c7cc23bd33db8049" exitCode=0 Sep 30 13:56:20 crc kubenswrapper[4783]: I0930 13:56:20.344533 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5d5b9bf7c6-787zm" event={"ID":"43d0d755-e680-4fd1-937f-bf1b03c82289","Type":"ContainerDied","Data":"60ebd235fc76c16ed44f02b3ce1ffc6e69b72508f0544269c7cc23bd33db8049"} Sep 30 13:56:20 crc kubenswrapper[4783]: E0930 13:56:20.391422 4783 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43d0d755_e680_4fd1_937f_bf1b03c82289.slice/crio-conmon-60ebd235fc76c16ed44f02b3ce1ffc6e69b72508f0544269c7cc23bd33db8049.scope\": RecentStats: unable to find data in memory cache]" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.060401 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.236919 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-combined-ca-bundle\") pod \"43d0d755-e680-4fd1-937f-bf1b03c82289\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.237244 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-ovndb-tls-certs\") pod \"43d0d755-e680-4fd1-937f-bf1b03c82289\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.237367 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rr9hw\" (UniqueName: \"kubernetes.io/projected/43d0d755-e680-4fd1-937f-bf1b03c82289-kube-api-access-rr9hw\") pod \"43d0d755-e680-4fd1-937f-bf1b03c82289\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.237611 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-httpd-config\") pod \"43d0d755-e680-4fd1-937f-bf1b03c82289\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.237795 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-config\") pod \"43d0d755-e680-4fd1-937f-bf1b03c82289\" (UID: \"43d0d755-e680-4fd1-937f-bf1b03c82289\") " Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.245268 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "43d0d755-e680-4fd1-937f-bf1b03c82289" (UID: "43d0d755-e680-4fd1-937f-bf1b03c82289"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.245594 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43d0d755-e680-4fd1-937f-bf1b03c82289-kube-api-access-rr9hw" (OuterVolumeSpecName: "kube-api-access-rr9hw") pod "43d0d755-e680-4fd1-937f-bf1b03c82289" (UID: "43d0d755-e680-4fd1-937f-bf1b03c82289"). InnerVolumeSpecName "kube-api-access-rr9hw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.301717 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "43d0d755-e680-4fd1-937f-bf1b03c82289" (UID: "43d0d755-e680-4fd1-937f-bf1b03c82289"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.313510 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "43d0d755-e680-4fd1-937f-bf1b03c82289" (UID: "43d0d755-e680-4fd1-937f-bf1b03c82289"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.314831 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-config" (OuterVolumeSpecName: "config") pod "43d0d755-e680-4fd1-937f-bf1b03c82289" (UID: "43d0d755-e680-4fd1-937f-bf1b03c82289"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.339830 4783 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.339872 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.339884 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.339895 4783 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/43d0d755-e680-4fd1-937f-bf1b03c82289-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.339907 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rr9hw\" (UniqueName: \"kubernetes.io/projected/43d0d755-e680-4fd1-937f-bf1b03c82289-kube-api-access-rr9hw\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.357504 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5d5b9bf7c6-787zm" event={"ID":"43d0d755-e680-4fd1-937f-bf1b03c82289","Type":"ContainerDied","Data":"4c63b28991caaee14fa3ecec26bf3ef5a6c5499beb811d601297221fc5f7990d"} Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.357575 4783 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openstack/neutron-5d5b9bf7c6-787zm" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.357583 4783 scope.go:117] "RemoveContainer" containerID="0146ef3582b3f46f0018759ff04f24fa8d7c3428f688adbf7c784affe0d1a231" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.390784 4783 scope.go:117] "RemoveContainer" containerID="60ebd235fc76c16ed44f02b3ce1ffc6e69b72508f0544269c7cc23bd33db8049" Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.391752 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5d5b9bf7c6-787zm"] Sep 30 13:56:21 crc kubenswrapper[4783]: I0930 13:56:21.400209 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5d5b9bf7c6-787zm"] Sep 30 13:56:22 crc kubenswrapper[4783]: I0930 13:56:22.399615 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d104d752-45e0-4a40-94e0-12366cbd1327","Type":"ContainerStarted","Data":"59df6d2fd0fc5c4f7323c113df6b47338d461f4efa5f9dce24af42a72d443fe0"} Sep 30 13:56:22 crc kubenswrapper[4783]: I0930 13:56:22.401193 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 13:56:22 crc kubenswrapper[4783]: I0930 13:56:22.862602 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43d0d755-e680-4fd1-937f-bf1b03c82289" path="/var/lib/kubelet/pods/43d0d755-e680-4fd1-937f-bf1b03c82289/volumes" Sep 30 13:56:36 crc kubenswrapper[4783]: I0930 13:56:36.556997 4783 generic.go:334] "Generic (PLEG): container finished" podID="abf9589b-c83a-44a3-943f-97739903c659" containerID="1c6d456af6501da0a87be7ace5190030c2228ba920c68f561c9e14c6789bd6e6" exitCode=0 Sep 30 13:56:36 crc kubenswrapper[4783]: I0930 13:56:36.557094 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-67qfh" event={"ID":"abf9589b-c83a-44a3-943f-97739903c659","Type":"ContainerDied","Data":"1c6d456af6501da0a87be7ace5190030c2228ba920c68f561c9e14c6789bd6e6"} Sep 30 13:56:36 crc kubenswrapper[4783]: I0930 13:56:36.576390 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=15.743664637 podStartE2EDuration="35.576375545s" podCreationTimestamp="2025-09-30 13:56:01 +0000 UTC" firstStartedPulling="2025-09-30 13:56:01.988903061 +0000 UTC m=+1261.920369368" lastFinishedPulling="2025-09-30 13:56:21.821613949 +0000 UTC m=+1281.753080276" observedRunningTime="2025-09-30 13:56:22.426406022 +0000 UTC m=+1282.357872369" watchObservedRunningTime="2025-09-30 13:56:36.576375545 +0000 UTC m=+1296.507841852" Sep 30 13:56:37 crc kubenswrapper[4783]: I0930 13:56:37.940833 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.047525 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9qnrl\" (UniqueName: \"kubernetes.io/projected/abf9589b-c83a-44a3-943f-97739903c659-kube-api-access-9qnrl\") pod \"abf9589b-c83a-44a3-943f-97739903c659\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.047701 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-config-data\") pod \"abf9589b-c83a-44a3-943f-97739903c659\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.047766 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-combined-ca-bundle\") pod \"abf9589b-c83a-44a3-943f-97739903c659\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.047882 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-scripts\") pod \"abf9589b-c83a-44a3-943f-97739903c659\" (UID: \"abf9589b-c83a-44a3-943f-97739903c659\") " Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.054206 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-scripts" (OuterVolumeSpecName: "scripts") pod "abf9589b-c83a-44a3-943f-97739903c659" (UID: "abf9589b-c83a-44a3-943f-97739903c659"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.057346 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abf9589b-c83a-44a3-943f-97739903c659-kube-api-access-9qnrl" (OuterVolumeSpecName: "kube-api-access-9qnrl") pod "abf9589b-c83a-44a3-943f-97739903c659" (UID: "abf9589b-c83a-44a3-943f-97739903c659"). InnerVolumeSpecName "kube-api-access-9qnrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.078701 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-config-data" (OuterVolumeSpecName: "config-data") pod "abf9589b-c83a-44a3-943f-97739903c659" (UID: "abf9589b-c83a-44a3-943f-97739903c659"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.085950 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "abf9589b-c83a-44a3-943f-97739903c659" (UID: "abf9589b-c83a-44a3-943f-97739903c659"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.150398 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.150445 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.150475 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9qnrl\" (UniqueName: \"kubernetes.io/projected/abf9589b-c83a-44a3-943f-97739903c659-kube-api-access-9qnrl\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.150502 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abf9589b-c83a-44a3-943f-97739903c659-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.587861 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-67qfh" event={"ID":"abf9589b-c83a-44a3-943f-97739903c659","Type":"ContainerDied","Data":"27744ad0f04426e16ec6428be8507f13184fddb3a6ecc0f848208bdd18184e4e"} Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.587916 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27744ad0f04426e16ec6428be8507f13184fddb3a6ecc0f848208bdd18184e4e" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.587946 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-67qfh" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.756458 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 13:56:38 crc kubenswrapper[4783]: E0930 13:56:38.757343 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8655922a-c182-49c1-aa15-6f47bd279990" containerName="mariadb-account-create" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.757364 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="8655922a-c182-49c1-aa15-6f47bd279990" containerName="mariadb-account-create" Sep 30 13:56:38 crc kubenswrapper[4783]: E0930 13:56:38.757387 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c989ff95-97ef-4a67-af97-0359d59c5392" containerName="mariadb-account-create" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.757397 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="c989ff95-97ef-4a67-af97-0359d59c5392" containerName="mariadb-account-create" Sep 30 13:56:38 crc kubenswrapper[4783]: E0930 13:56:38.757411 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abf9589b-c83a-44a3-943f-97739903c659" containerName="nova-cell0-conductor-db-sync" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.757419 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="abf9589b-c83a-44a3-943f-97739903c659" containerName="nova-cell0-conductor-db-sync" Sep 30 13:56:38 crc kubenswrapper[4783]: E0930 13:56:38.757429 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43d0d755-e680-4fd1-937f-bf1b03c82289" containerName="neutron-httpd" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.757437 4783 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="43d0d755-e680-4fd1-937f-bf1b03c82289" containerName="neutron-httpd" Sep 30 13:56:38 crc kubenswrapper[4783]: E0930 13:56:38.757470 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43d0d755-e680-4fd1-937f-bf1b03c82289" containerName="neutron-api" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.757479 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="43d0d755-e680-4fd1-937f-bf1b03c82289" containerName="neutron-api" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.757723 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="43d0d755-e680-4fd1-937f-bf1b03c82289" containerName="neutron-api" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.757744 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="c989ff95-97ef-4a67-af97-0359d59c5392" containerName="mariadb-account-create" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.757757 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="abf9589b-c83a-44a3-943f-97739903c659" containerName="nova-cell0-conductor-db-sync" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.757776 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="8655922a-c182-49c1-aa15-6f47bd279990" containerName="mariadb-account-create" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.757794 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="43d0d755-e680-4fd1-937f-bf1b03c82289" containerName="neutron-httpd" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.758587 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.763050 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.763066 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-rstfl" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.765850 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.863838 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f157470-67d0-452c-9959-a452400c02d7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8f157470-67d0-452c-9959-a452400c02d7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.863905 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f157470-67d0-452c-9959-a452400c02d7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"8f157470-67d0-452c-9959-a452400c02d7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.863930 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qjjq\" (UniqueName: \"kubernetes.io/projected/8f157470-67d0-452c-9959-a452400c02d7-kube-api-access-4qjjq\") pod \"nova-cell0-conductor-0\" (UID: \"8f157470-67d0-452c-9959-a452400c02d7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.966015 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/8f157470-67d0-452c-9959-a452400c02d7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8f157470-67d0-452c-9959-a452400c02d7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.966079 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f157470-67d0-452c-9959-a452400c02d7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"8f157470-67d0-452c-9959-a452400c02d7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.966105 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qjjq\" (UniqueName: \"kubernetes.io/projected/8f157470-67d0-452c-9959-a452400c02d7-kube-api-access-4qjjq\") pod \"nova-cell0-conductor-0\" (UID: \"8f157470-67d0-452c-9959-a452400c02d7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.975013 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f157470-67d0-452c-9959-a452400c02d7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8f157470-67d0-452c-9959-a452400c02d7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.975077 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f157470-67d0-452c-9959-a452400c02d7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"8f157470-67d0-452c-9959-a452400c02d7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 13:56:38 crc kubenswrapper[4783]: I0930 13:56:38.982857 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qjjq\" (UniqueName: \"kubernetes.io/projected/8f157470-67d0-452c-9959-a452400c02d7-kube-api-access-4qjjq\") pod \"nova-cell0-conductor-0\" (UID: \"8f157470-67d0-452c-9959-a452400c02d7\") " pod="openstack/nova-cell0-conductor-0" Sep 30 13:56:39 crc kubenswrapper[4783]: I0930 13:56:39.078605 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 13:56:39 crc kubenswrapper[4783]: I0930 13:56:39.536147 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 13:56:39 crc kubenswrapper[4783]: W0930 13:56:39.537753 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f157470_67d0_452c_9959_a452400c02d7.slice/crio-ef6c2c73e5a76bc989b713328ac39c4f0403911c01b172c51189e18f3e0e871c WatchSource:0}: Error finding container ef6c2c73e5a76bc989b713328ac39c4f0403911c01b172c51189e18f3e0e871c: Status 404 returned error can't find the container with id ef6c2c73e5a76bc989b713328ac39c4f0403911c01b172c51189e18f3e0e871c Sep 30 13:56:39 crc kubenswrapper[4783]: I0930 13:56:39.599424 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"8f157470-67d0-452c-9959-a452400c02d7","Type":"ContainerStarted","Data":"ef6c2c73e5a76bc989b713328ac39c4f0403911c01b172c51189e18f3e0e871c"} Sep 30 13:56:40 crc kubenswrapper[4783]: I0930 13:56:40.619170 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"8f157470-67d0-452c-9959-a452400c02d7","Type":"ContainerStarted","Data":"48d38b5c765562ca9d2dc56e100867eff5ee4e00fb09908ca53b0024ef270980"} Sep 30 13:56:40 crc kubenswrapper[4783]: I0930 13:56:40.619630 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 30 13:56:40 crc kubenswrapper[4783]: I0930 13:56:40.652917 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.652898085 podStartE2EDuration="2.652898085s" podCreationTimestamp="2025-09-30 13:56:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:56:40.649654481 +0000 UTC m=+1300.581120808" watchObservedRunningTime="2025-09-30 13:56:40.652898085 +0000 UTC m=+1300.584364392" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.107628 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.628250 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-g9gmd"] Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.630847 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.632808 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.635521 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.649607 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-g9gmd"] Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.676185 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-scripts\") pod \"nova-cell0-cell-mapping-g9gmd\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.676285 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwhm9\" (UniqueName: \"kubernetes.io/projected/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-kube-api-access-wwhm9\") pod \"nova-cell0-cell-mapping-g9gmd\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.676343 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-g9gmd\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.676431 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-config-data\") pod \"nova-cell0-cell-mapping-g9gmd\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.764824 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.766407 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.768379 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.779397 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-scripts\") pod \"nova-cell0-cell-mapping-g9gmd\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.780657 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50057626-1b68-42b2-8f24-d6f4a8e13525-config-data\") pod \"nova-api-0\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " pod="openstack/nova-api-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.780751 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwhm9\" (UniqueName: \"kubernetes.io/projected/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-kube-api-access-wwhm9\") pod \"nova-cell0-cell-mapping-g9gmd\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.780781 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50057626-1b68-42b2-8f24-d6f4a8e13525-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " pod="openstack/nova-api-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.780857 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-g9gmd\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.780957 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpqv9\" (UniqueName: \"kubernetes.io/projected/50057626-1b68-42b2-8f24-d6f4a8e13525-kube-api-access-cpqv9\") pod \"nova-api-0\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " pod="openstack/nova-api-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.780990 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-config-data\") pod \"nova-cell0-cell-mapping-g9gmd\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.781015 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50057626-1b68-42b2-8f24-d6f4a8e13525-logs\") pod \"nova-api-0\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " pod="openstack/nova-api-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.789549 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-config-data\") pod \"nova-cell0-cell-mapping-g9gmd\" (UID: 
\"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.797950 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-scripts\") pod \"nova-cell0-cell-mapping-g9gmd\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.798242 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-g9gmd\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.802507 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.829232 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.830675 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.840726 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.856052 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwhm9\" (UniqueName: \"kubernetes.io/projected/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-kube-api-access-wwhm9\") pod \"nova-cell0-cell-mapping-g9gmd\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.860146 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.893861 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpqv9\" (UniqueName: \"kubernetes.io/projected/50057626-1b68-42b2-8f24-d6f4a8e13525-kube-api-access-cpqv9\") pod \"nova-api-0\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " pod="openstack/nova-api-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.893958 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50057626-1b68-42b2-8f24-d6f4a8e13525-logs\") pod \"nova-api-0\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " pod="openstack/nova-api-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.894173 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50057626-1b68-42b2-8f24-d6f4a8e13525-config-data\") pod \"nova-api-0\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " pod="openstack/nova-api-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.894313 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50057626-1b68-42b2-8f24-d6f4a8e13525-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " pod="openstack/nova-api-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.894900 4783 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50057626-1b68-42b2-8f24-d6f4a8e13525-logs\") pod \"nova-api-0\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " pod="openstack/nova-api-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.907637 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50057626-1b68-42b2-8f24-d6f4a8e13525-config-data\") pod \"nova-api-0\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " pod="openstack/nova-api-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.911964 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50057626-1b68-42b2-8f24-d6f4a8e13525-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " pod="openstack/nova-api-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.946792 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpqv9\" (UniqueName: \"kubernetes.io/projected/50057626-1b68-42b2-8f24-d6f4a8e13525-kube-api-access-cpqv9\") pod \"nova-api-0\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " pod="openstack/nova-api-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.961033 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.962854 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.969191 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 13:56:44 crc kubenswrapper[4783]: I0930 13:56:44.972599 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:44.998628 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kxh2\" (UniqueName: \"kubernetes.io/projected/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-kube-api-access-5kxh2\") pod \"nova-scheduler-0\" (UID: \"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\") " pod="openstack/nova-scheduler-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.000742 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-config-data\") pod \"nova-scheduler-0\" (UID: \"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\") " pod="openstack/nova-scheduler-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.001127 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\") " pod="openstack/nova-scheduler-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.013042 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.041519 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.042803 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.053534 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.084295 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.085840 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.102624 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " pod="openstack/nova-metadata-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.102691 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-config-data\") pod \"nova-metadata-0\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " pod="openstack/nova-metadata-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.102724 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxs9j\" (UniqueName: \"kubernetes.io/projected/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-kube-api-access-jxs9j\") pod \"nova-metadata-0\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " pod="openstack/nova-metadata-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.102772 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kxh2\" (UniqueName: \"kubernetes.io/projected/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-kube-api-access-5kxh2\") pod \"nova-scheduler-0\" (UID: \"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\") " pod="openstack/nova-scheduler-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.102810 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-logs\") pod \"nova-metadata-0\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " pod="openstack/nova-metadata-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.103064 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-config-data\") pod \"nova-scheduler-0\" (UID: \"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\") " pod="openstack/nova-scheduler-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.103185 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\") " pod="openstack/nova-scheduler-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.109834 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-config-data\") pod \"nova-scheduler-0\" (UID: \"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\") " pod="openstack/nova-scheduler-0" Sep 30 13:56:45 
crc kubenswrapper[4783]: I0930 13:56:45.110071 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\") " pod="openstack/nova-scheduler-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.124008 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7d9cc4c77f-bb8wx"] Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.125800 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.128425 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kxh2\" (UniqueName: \"kubernetes.io/projected/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-kube-api-access-5kxh2\") pod \"nova-scheduler-0\" (UID: \"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\") " pod="openstack/nova-scheduler-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.133747 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d9cc4c77f-bb8wx"] Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.205042 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " pod="openstack/nova-metadata-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.205379 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-config-data\") pod \"nova-metadata-0\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " pod="openstack/nova-metadata-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.205401 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxs9j\" (UniqueName: \"kubernetes.io/projected/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-kube-api-access-jxs9j\") pod \"nova-metadata-0\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " pod="openstack/nova-metadata-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.205433 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27vmq\" (UniqueName: \"kubernetes.io/projected/5f2f282a-74c3-4823-a2cb-7c002d18eedb-kube-api-access-27vmq\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.205467 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-logs\") pod \"nova-metadata-0\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " pod="openstack/nova-metadata-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.205507 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f2f282a-74c3-4823-a2cb-7c002d18eedb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.205596 4783 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f2f282a-74c3-4823-a2cb-7c002d18eedb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.206262 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-logs\") pod \"nova-metadata-0\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " pod="openstack/nova-metadata-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.211581 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " pod="openstack/nova-metadata-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.212213 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-config-data\") pod \"nova-metadata-0\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " pod="openstack/nova-metadata-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.232844 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxs9j\" (UniqueName: \"kubernetes.io/projected/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-kube-api-access-jxs9j\") pod \"nova-metadata-0\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " pod="openstack/nova-metadata-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.307483 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-ovsdbserver-sb\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.307601 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-config\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.307684 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27vmq\" (UniqueName: \"kubernetes.io/projected/5f2f282a-74c3-4823-a2cb-7c002d18eedb-kube-api-access-27vmq\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.307808 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-dns-swift-storage-0\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.307866 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g229n\" 
(UniqueName: \"kubernetes.io/projected/2c3128f8-342d-46a4-a539-bfd0942a8b86-kube-api-access-g229n\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.307895 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-dns-svc\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.307922 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f2f282a-74c3-4823-a2cb-7c002d18eedb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.307949 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-ovsdbserver-nb\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.308046 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f2f282a-74c3-4823-a2cb-7c002d18eedb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.312376 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f2f282a-74c3-4823-a2cb-7c002d18eedb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.313682 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f2f282a-74c3-4823-a2cb-7c002d18eedb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.331991 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27vmq\" (UniqueName: \"kubernetes.io/projected/5f2f282a-74c3-4823-a2cb-7c002d18eedb-kube-api-access-27vmq\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.375294 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.398755 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.409648 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-config\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.409738 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-dns-swift-storage-0\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.409772 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g229n\" (UniqueName: \"kubernetes.io/projected/2c3128f8-342d-46a4-a539-bfd0942a8b86-kube-api-access-g229n\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.409793 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-dns-svc\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.409817 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-ovsdbserver-nb\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.409884 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-ovsdbserver-sb\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.410562 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-config\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.410967 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-dns-svc\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.410969 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-ovsdbserver-sb\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 
13:56:45.411082 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-ovsdbserver-nb\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.411307 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-dns-swift-storage-0\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.412537 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.433808 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g229n\" (UniqueName: \"kubernetes.io/projected/2c3128f8-342d-46a4-a539-bfd0942a8b86-kube-api-access-g229n\") pod \"dnsmasq-dns-7d9cc4c77f-bb8wx\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.457817 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.562336 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-g9gmd"] Sep 30 13:56:45 crc kubenswrapper[4783]: W0930 13:56:45.706916 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50057626_1b68_42b2_8f24_d6f4a8e13525.slice/crio-6051fadac4c90e9e99f82540facd5de4093a2e5997a9f240be5fdcb6fccc71d1 WatchSource:0}: Error finding container 6051fadac4c90e9e99f82540facd5de4093a2e5997a9f240be5fdcb6fccc71d1: Status 404 returned error can't find the container with id 6051fadac4c90e9e99f82540facd5de4093a2e5997a9f240be5fdcb6fccc71d1 Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.708404 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.708867 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-g9gmd" event={"ID":"8fe5df32-3b3d-486a-abe4-0e04c91c54c6","Type":"ContainerStarted","Data":"9c6bb2f0225e14b696d50ce4b2cbb1db6296c897d8ec537f81fc0e306bf11055"} Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.723883 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mlmst"] Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.725496 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.732541 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.735202 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.744503 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mlmst"] Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.919231 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kf5r9\" (UniqueName: \"kubernetes.io/projected/523d7b8c-32a8-4235-b665-b657176a8001-kube-api-access-kf5r9\") pod \"nova-cell1-conductor-db-sync-mlmst\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.919552 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-scripts\") pod \"nova-cell1-conductor-db-sync-mlmst\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.919673 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-mlmst\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.919702 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-config-data\") pod \"nova-cell1-conductor-db-sync-mlmst\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:45 crc kubenswrapper[4783]: I0930 13:56:45.919987 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.018638 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 13:56:46 crc kubenswrapper[4783]: W0930 13:56:46.020796 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f2f282a_74c3_4823_a2cb_7c002d18eedb.slice/crio-aaa549b6b271542c831a0317352698f5fb6c91f3ba447ba08bbaf3cdeaff8512 WatchSource:0}: Error finding container aaa549b6b271542c831a0317352698f5fb6c91f3ba447ba08bbaf3cdeaff8512: Status 404 returned error can't find the container with id aaa549b6b271542c831a0317352698f5fb6c91f3ba447ba08bbaf3cdeaff8512 Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.037472 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-mlmst\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:46 crc 
kubenswrapper[4783]: I0930 13:56:46.037541 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-config-data\") pod \"nova-cell1-conductor-db-sync-mlmst\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.037700 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kf5r9\" (UniqueName: \"kubernetes.io/projected/523d7b8c-32a8-4235-b665-b657176a8001-kube-api-access-kf5r9\") pod \"nova-cell1-conductor-db-sync-mlmst\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.037732 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-scripts\") pod \"nova-cell1-conductor-db-sync-mlmst\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.041289 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.044709 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-scripts\") pod \"nova-cell1-conductor-db-sync-mlmst\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.047690 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-mlmst\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.050028 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-config-data\") pod \"nova-cell1-conductor-db-sync-mlmst\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.054633 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kf5r9\" (UniqueName: \"kubernetes.io/projected/523d7b8c-32a8-4235-b665-b657176a8001-kube-api-access-kf5r9\") pod \"nova-cell1-conductor-db-sync-mlmst\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.084502 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.190313 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d9cc4c77f-bb8wx"] Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.617590 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mlmst"] Sep 30 13:56:46 crc kubenswrapper[4783]: W0930 13:56:46.624132 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod523d7b8c_32a8_4235_b665_b657176a8001.slice/crio-7c0f20fbe78696d70cddd83e21a22becfee03301aa03fa173dfef325c2f83aa1 WatchSource:0}: Error finding container 7c0f20fbe78696d70cddd83e21a22becfee03301aa03fa173dfef325c2f83aa1: Status 404 returned error can't find the container with id 7c0f20fbe78696d70cddd83e21a22becfee03301aa03fa173dfef325c2f83aa1 Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.744433 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf","Type":"ContainerStarted","Data":"00e35931a1b61670f0905f043738e8a9194025059af5084bb775d5a1dc221d5f"} Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.746763 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"50057626-1b68-42b2-8f24-d6f4a8e13525","Type":"ContainerStarted","Data":"6051fadac4c90e9e99f82540facd5de4093a2e5997a9f240be5fdcb6fccc71d1"} Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.748569 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-g9gmd" event={"ID":"8fe5df32-3b3d-486a-abe4-0e04c91c54c6","Type":"ContainerStarted","Data":"1add237bfa70eb79e74526b5f2695b642b54a5e84d25e87dae1aef8e46b7ab79"} Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.770212 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5f2f282a-74c3-4823-a2cb-7c002d18eedb","Type":"ContainerStarted","Data":"aaa549b6b271542c831a0317352698f5fb6c91f3ba447ba08bbaf3cdeaff8512"} Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.784173 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-g9gmd" podStartSLOduration=2.7841480499999998 podStartE2EDuration="2.78414805s" podCreationTimestamp="2025-09-30 13:56:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:56:46.778912563 +0000 UTC m=+1306.710378870" watchObservedRunningTime="2025-09-30 13:56:46.78414805 +0000 UTC m=+1306.715614367" Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.845804 4783 generic.go:334] "Generic (PLEG): container finished" podID="2c3128f8-342d-46a4-a539-bfd0942a8b86" containerID="1aaec22744a875049f09758bfe3bda799b1561924ec8abcf01651a6c00b6ef61" exitCode=0 Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.934644 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" event={"ID":"2c3128f8-342d-46a4-a539-bfd0942a8b86","Type":"ContainerDied","Data":"1aaec22744a875049f09758bfe3bda799b1561924ec8abcf01651a6c00b6ef61"} Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.934690 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" 
event={"ID":"2c3128f8-342d-46a4-a539-bfd0942a8b86","Type":"ContainerStarted","Data":"1ff90852cf36ac9f817b1875b7b4422275dd5d46ea31850605d1b3e74b3d4d48"} Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.934702 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fec8c6a1-7435-4f3e-b77b-50fe20220c2a","Type":"ContainerStarted","Data":"3db5dbfa3106c115a71b0cc72d738118c755ced7f2a3917604a28a2e53480152"} Sep 30 13:56:46 crc kubenswrapper[4783]: I0930 13:56:46.934713 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-mlmst" event={"ID":"523d7b8c-32a8-4235-b665-b657176a8001","Type":"ContainerStarted","Data":"7c0f20fbe78696d70cddd83e21a22becfee03301aa03fa173dfef325c2f83aa1"} Sep 30 13:56:47 crc kubenswrapper[4783]: I0930 13:56:47.938753 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" event={"ID":"2c3128f8-342d-46a4-a539-bfd0942a8b86","Type":"ContainerStarted","Data":"da3edee2cd4b177e4ee848d5ef8868fee8f378a21bb73341b9b74c4cfe70ea05"} Sep 30 13:56:47 crc kubenswrapper[4783]: I0930 13:56:47.939209 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:47 crc kubenswrapper[4783]: I0930 13:56:47.940800 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-mlmst" event={"ID":"523d7b8c-32a8-4235-b665-b657176a8001","Type":"ContainerStarted","Data":"d40de3bbab8c0ae54ee3ede870b97a3dc0b58b8f8e8ce00329d52caf9cd21044"} Sep 30 13:56:47 crc kubenswrapper[4783]: I0930 13:56:47.968890 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" podStartSLOduration=3.968870051 podStartE2EDuration="3.968870051s" podCreationTimestamp="2025-09-30 13:56:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:56:47.963657223 +0000 UTC m=+1307.895123530" watchObservedRunningTime="2025-09-30 13:56:47.968870051 +0000 UTC m=+1307.900336368" Sep 30 13:56:47 crc kubenswrapper[4783]: I0930 13:56:47.990109 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-mlmst" podStartSLOduration=2.990091708 podStartE2EDuration="2.990091708s" podCreationTimestamp="2025-09-30 13:56:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:56:47.982758124 +0000 UTC m=+1307.914224431" watchObservedRunningTime="2025-09-30 13:56:47.990091708 +0000 UTC m=+1307.921558015" Sep 30 13:56:48 crc kubenswrapper[4783]: I0930 13:56:48.963874 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 13:56:49 crc kubenswrapper[4783]: I0930 13:56:49.026858 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 13:56:49 crc kubenswrapper[4783]: I0930 13:56:49.973904 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf","Type":"ContainerStarted","Data":"92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679"} Sep 30 13:56:49 crc kubenswrapper[4783]: I0930 13:56:49.977050 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"50057626-1b68-42b2-8f24-d6f4a8e13525","Type":"ContainerStarted","Data":"1d23ce1df0a91c4369f663ec3f32c6e8e47e627220be507db99b21e8a7b08616"} Sep 30 13:56:49 crc kubenswrapper[4783]: I0930 13:56:49.977091 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"50057626-1b68-42b2-8f24-d6f4a8e13525","Type":"ContainerStarted","Data":"2f7a42fd8ceaf94e97b6cf43a970b48b8b2e7468dee05fd9dcc345a8233a919b"} Sep 30 13:56:49 crc kubenswrapper[4783]: I0930 13:56:49.983764 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5f2f282a-74c3-4823-a2cb-7c002d18eedb","Type":"ContainerStarted","Data":"b943f935378c46657faf26337ffbeab32303c97f34be6ae4d529223a08f15572"} Sep 30 13:56:49 crc kubenswrapper[4783]: I0930 13:56:49.983891 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="5f2f282a-74c3-4823-a2cb-7c002d18eedb" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://b943f935378c46657faf26337ffbeab32303c97f34be6ae4d529223a08f15572" gracePeriod=30 Sep 30 13:56:49 crc kubenswrapper[4783]: I0930 13:56:49.987524 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fec8c6a1-7435-4f3e-b77b-50fe20220c2a","Type":"ContainerStarted","Data":"73cf2228317a8d20ed34573779cec6df5113380b476db226cf7b16fb89423412"} Sep 30 13:56:49 crc kubenswrapper[4783]: I0930 13:56:49.987575 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fec8c6a1-7435-4f3e-b77b-50fe20220c2a","Type":"ContainerStarted","Data":"e6fddfa8b81d2a83d3447008627b220e5a1a82c63b8656f14d2cbc72ff3c8250"} Sep 30 13:56:49 crc kubenswrapper[4783]: I0930 13:56:49.987733 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fec8c6a1-7435-4f3e-b77b-50fe20220c2a" containerName="nova-metadata-log" containerID="cri-o://e6fddfa8b81d2a83d3447008627b220e5a1a82c63b8656f14d2cbc72ff3c8250" gracePeriod=30 Sep 30 13:56:49 crc kubenswrapper[4783]: I0930 13:56:49.987874 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fec8c6a1-7435-4f3e-b77b-50fe20220c2a" containerName="nova-metadata-metadata" containerID="cri-o://73cf2228317a8d20ed34573779cec6df5113380b476db226cf7b16fb89423412" gracePeriod=30 Sep 30 13:56:50 crc kubenswrapper[4783]: I0930 13:56:50.000432 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.5750742989999997 podStartE2EDuration="6.000410185s" podCreationTimestamp="2025-09-30 13:56:44 +0000 UTC" firstStartedPulling="2025-09-30 13:56:45.942573672 +0000 UTC m=+1305.874039979" lastFinishedPulling="2025-09-30 13:56:49.367909558 +0000 UTC m=+1309.299375865" observedRunningTime="2025-09-30 13:56:49.989692263 +0000 UTC m=+1309.921158590" watchObservedRunningTime="2025-09-30 13:56:50.000410185 +0000 UTC m=+1309.931876492" Sep 30 13:56:50 crc kubenswrapper[4783]: I0930 13:56:50.025123 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.707275104 podStartE2EDuration="6.025096844s" podCreationTimestamp="2025-09-30 13:56:44 +0000 UTC" firstStartedPulling="2025-09-30 13:56:46.05800143 +0000 UTC m=+1305.989467737" lastFinishedPulling="2025-09-30 13:56:49.37582317 +0000 UTC m=+1309.307289477" observedRunningTime="2025-09-30 
13:56:50.010526458 +0000 UTC m=+1309.941992765" watchObservedRunningTime="2025-09-30 13:56:50.025096844 +0000 UTC m=+1309.956563151" Sep 30 13:56:50 crc kubenswrapper[4783]: I0930 13:56:50.042581 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.382548139 podStartE2EDuration="6.042559512s" podCreationTimestamp="2025-09-30 13:56:44 +0000 UTC" firstStartedPulling="2025-09-30 13:56:45.709771034 +0000 UTC m=+1305.641237341" lastFinishedPulling="2025-09-30 13:56:49.369782407 +0000 UTC m=+1309.301248714" observedRunningTime="2025-09-30 13:56:50.029663149 +0000 UTC m=+1309.961129456" watchObservedRunningTime="2025-09-30 13:56:50.042559512 +0000 UTC m=+1309.974025819" Sep 30 13:56:50 crc kubenswrapper[4783]: I0930 13:56:50.060118 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.734859756 podStartE2EDuration="6.060102323s" podCreationTimestamp="2025-09-30 13:56:44 +0000 UTC" firstStartedPulling="2025-09-30 13:56:46.044286622 +0000 UTC m=+1305.975752929" lastFinishedPulling="2025-09-30 13:56:49.369529189 +0000 UTC m=+1309.300995496" observedRunningTime="2025-09-30 13:56:50.056391184 +0000 UTC m=+1309.987857491" watchObservedRunningTime="2025-09-30 13:56:50.060102323 +0000 UTC m=+1309.991568630" Sep 30 13:56:50 crc kubenswrapper[4783]: I0930 13:56:50.375854 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 13:56:50 crc kubenswrapper[4783]: I0930 13:56:50.399103 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 13:56:50 crc kubenswrapper[4783]: I0930 13:56:50.400317 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 13:56:50 crc kubenswrapper[4783]: I0930 13:56:50.414005 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:56:50 crc kubenswrapper[4783]: I0930 13:56:50.999866 4783 generic.go:334] "Generic (PLEG): container finished" podID="fec8c6a1-7435-4f3e-b77b-50fe20220c2a" containerID="e6fddfa8b81d2a83d3447008627b220e5a1a82c63b8656f14d2cbc72ff3c8250" exitCode=143 Sep 30 13:56:51 crc kubenswrapper[4783]: I0930 13:56:51.000730 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fec8c6a1-7435-4f3e-b77b-50fe20220c2a","Type":"ContainerDied","Data":"e6fddfa8b81d2a83d3447008627b220e5a1a82c63b8656f14d2cbc72ff3c8250"} Sep 30 13:56:54 crc kubenswrapper[4783]: I0930 13:56:54.057070 4783 generic.go:334] "Generic (PLEG): container finished" podID="8fe5df32-3b3d-486a-abe4-0e04c91c54c6" containerID="1add237bfa70eb79e74526b5f2695b642b54a5e84d25e87dae1aef8e46b7ab79" exitCode=0 Sep 30 13:56:54 crc kubenswrapper[4783]: I0930 13:56:54.057151 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-g9gmd" event={"ID":"8fe5df32-3b3d-486a-abe4-0e04c91c54c6","Type":"ContainerDied","Data":"1add237bfa70eb79e74526b5f2695b642b54a5e84d25e87dae1aef8e46b7ab79"} Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.085305 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.086555 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.376186 4783 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.402943 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.459707 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.472585 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.535588 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c47bb5d77-b72ll"] Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.535865 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" podUID="9f29adc0-f647-4bb9-98fc-8124c7f30a2d" containerName="dnsmasq-dns" containerID="cri-o://bf3b17d296f0f00755380f1d48da695f1bea8e7e526ee3f11e4f86655b891c32" gracePeriod=10 Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.585108 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-config-data\") pod \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.585183 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-combined-ca-bundle\") pod \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.585209 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-scripts\") pod \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.585250 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwhm9\" (UniqueName: \"kubernetes.io/projected/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-kube-api-access-wwhm9\") pod \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\" (UID: \"8fe5df32-3b3d-486a-abe4-0e04c91c54c6\") " Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.597295 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-scripts" (OuterVolumeSpecName: "scripts") pod "8fe5df32-3b3d-486a-abe4-0e04c91c54c6" (UID: "8fe5df32-3b3d-486a-abe4-0e04c91c54c6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.597515 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-kube-api-access-wwhm9" (OuterVolumeSpecName: "kube-api-access-wwhm9") pod "8fe5df32-3b3d-486a-abe4-0e04c91c54c6" (UID: "8fe5df32-3b3d-486a-abe4-0e04c91c54c6"). InnerVolumeSpecName "kube-api-access-wwhm9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.621119 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8fe5df32-3b3d-486a-abe4-0e04c91c54c6" (UID: "8fe5df32-3b3d-486a-abe4-0e04c91c54c6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.681807 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-config-data" (OuterVolumeSpecName: "config-data") pod "8fe5df32-3b3d-486a-abe4-0e04c91c54c6" (UID: "8fe5df32-3b3d-486a-abe4-0e04c91c54c6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.688081 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.688120 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.688133 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwhm9\" (UniqueName: \"kubernetes.io/projected/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-kube-api-access-wwhm9\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.688146 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fe5df32-3b3d-486a-abe4-0e04c91c54c6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:55 crc kubenswrapper[4783]: I0930 13:56:55.990980 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.083969 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-g9gmd" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.083979 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-g9gmd" event={"ID":"8fe5df32-3b3d-486a-abe4-0e04c91c54c6","Type":"ContainerDied","Data":"9c6bb2f0225e14b696d50ce4b2cbb1db6296c897d8ec537f81fc0e306bf11055"} Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.084433 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c6bb2f0225e14b696d50ce4b2cbb1db6296c897d8ec537f81fc0e306bf11055" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.090171 4783 generic.go:334] "Generic (PLEG): container finished" podID="9f29adc0-f647-4bb9-98fc-8124c7f30a2d" containerID="bf3b17d296f0f00755380f1d48da695f1bea8e7e526ee3f11e4f86655b891c32" exitCode=0 Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.090263 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" event={"ID":"9f29adc0-f647-4bb9-98fc-8124c7f30a2d","Type":"ContainerDied","Data":"bf3b17d296f0f00755380f1d48da695f1bea8e7e526ee3f11e4f86655b891c32"} Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.090275 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.090330 4783 scope.go:117] "RemoveContainer" containerID="bf3b17d296f0f00755380f1d48da695f1bea8e7e526ee3f11e4f86655b891c32" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.090316 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" event={"ID":"9f29adc0-f647-4bb9-98fc-8124c7f30a2d","Type":"ContainerDied","Data":"b0d9261b33eafba6dba7c4c5437b90ab5df0813b0205b89ea1cb059c94627a84"} Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.095879 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-ovsdbserver-sb\") pod \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.096055 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-ovsdbserver-nb\") pod \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.096089 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-dns-svc\") pod \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.096118 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-config\") pod \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.096172 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6h2n\" (UniqueName: \"kubernetes.io/projected/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-kube-api-access-v6h2n\") pod \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\" 
(UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.096233 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-dns-swift-storage-0\") pod \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\" (UID: \"9f29adc0-f647-4bb9-98fc-8124c7f30a2d\") " Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.103201 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-kube-api-access-v6h2n" (OuterVolumeSpecName: "kube-api-access-v6h2n") pod "9f29adc0-f647-4bb9-98fc-8124c7f30a2d" (UID: "9f29adc0-f647-4bb9-98fc-8124c7f30a2d"). InnerVolumeSpecName "kube-api-access-v6h2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.124111 4783 scope.go:117] "RemoveContainer" containerID="1652491019d436f8f721dab014615fd6476ed0d3b235b982ff8880bb0917fc6e" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.157708 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.170393 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="50057626-1b68-42b2-8f24-d6f4a8e13525" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.170459 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="50057626-1b68-42b2-8f24-d6f4a8e13525" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.174916 4783 scope.go:117] "RemoveContainer" containerID="bf3b17d296f0f00755380f1d48da695f1bea8e7e526ee3f11e4f86655b891c32" Sep 30 13:56:56 crc kubenswrapper[4783]: E0930 13:56:56.176549 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf3b17d296f0f00755380f1d48da695f1bea8e7e526ee3f11e4f86655b891c32\": container with ID starting with bf3b17d296f0f00755380f1d48da695f1bea8e7e526ee3f11e4f86655b891c32 not found: ID does not exist" containerID="bf3b17d296f0f00755380f1d48da695f1bea8e7e526ee3f11e4f86655b891c32" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.176607 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf3b17d296f0f00755380f1d48da695f1bea8e7e526ee3f11e4f86655b891c32"} err="failed to get container status \"bf3b17d296f0f00755380f1d48da695f1bea8e7e526ee3f11e4f86655b891c32\": rpc error: code = NotFound desc = could not find container \"bf3b17d296f0f00755380f1d48da695f1bea8e7e526ee3f11e4f86655b891c32\": container with ID starting with bf3b17d296f0f00755380f1d48da695f1bea8e7e526ee3f11e4f86655b891c32 not found: ID does not exist" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.176640 4783 scope.go:117] "RemoveContainer" containerID="1652491019d436f8f721dab014615fd6476ed0d3b235b982ff8880bb0917fc6e" Sep 30 13:56:56 crc kubenswrapper[4783]: E0930 13:56:56.176954 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"1652491019d436f8f721dab014615fd6476ed0d3b235b982ff8880bb0917fc6e\": container with ID starting with 1652491019d436f8f721dab014615fd6476ed0d3b235b982ff8880bb0917fc6e not found: ID does not exist" containerID="1652491019d436f8f721dab014615fd6476ed0d3b235b982ff8880bb0917fc6e" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.176988 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1652491019d436f8f721dab014615fd6476ed0d3b235b982ff8880bb0917fc6e"} err="failed to get container status \"1652491019d436f8f721dab014615fd6476ed0d3b235b982ff8880bb0917fc6e\": rpc error: code = NotFound desc = could not find container \"1652491019d436f8f721dab014615fd6476ed0d3b235b982ff8880bb0917fc6e\": container with ID starting with 1652491019d436f8f721dab014615fd6476ed0d3b235b982ff8880bb0917fc6e not found: ID does not exist" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.185953 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9f29adc0-f647-4bb9-98fc-8124c7f30a2d" (UID: "9f29adc0-f647-4bb9-98fc-8124c7f30a2d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.194643 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9f29adc0-f647-4bb9-98fc-8124c7f30a2d" (UID: "9f29adc0-f647-4bb9-98fc-8124c7f30a2d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.194721 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-config" (OuterVolumeSpecName: "config") pod "9f29adc0-f647-4bb9-98fc-8124c7f30a2d" (UID: "9f29adc0-f647-4bb9-98fc-8124c7f30a2d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.198665 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.198691 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.198701 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6h2n\" (UniqueName: \"kubernetes.io/projected/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-kube-api-access-v6h2n\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.198712 4783 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.202989 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9f29adc0-f647-4bb9-98fc-8124c7f30a2d" (UID: "9f29adc0-f647-4bb9-98fc-8124c7f30a2d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.207549 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9f29adc0-f647-4bb9-98fc-8124c7f30a2d" (UID: "9f29adc0-f647-4bb9-98fc-8124c7f30a2d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.257052 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.300885 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.300942 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f29adc0-f647-4bb9-98fc-8124c7f30a2d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.437754 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c47bb5d77-b72ll"] Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.445718 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c47bb5d77-b72ll"] Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.608377 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 13:56:56 crc kubenswrapper[4783]: I0930 13:56:56.854464 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f29adc0-f647-4bb9-98fc-8124c7f30a2d" path="/var/lib/kubelet/pods/9f29adc0-f647-4bb9-98fc-8124c7f30a2d/volumes" Sep 30 13:56:57 crc kubenswrapper[4783]: I0930 13:56:57.103367 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="50057626-1b68-42b2-8f24-d6f4a8e13525" containerName="nova-api-log" containerID="cri-o://2f7a42fd8ceaf94e97b6cf43a970b48b8b2e7468dee05fd9dcc345a8233a919b" gracePeriod=30 Sep 30 13:56:57 crc kubenswrapper[4783]: I0930 13:56:57.103417 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="50057626-1b68-42b2-8f24-d6f4a8e13525" containerName="nova-api-api" containerID="cri-o://1d23ce1df0a91c4369f663ec3f32c6e8e47e627220be507db99b21e8a7b08616" gracePeriod=30 Sep 30 13:56:58 crc kubenswrapper[4783]: I0930 13:56:58.115711 4783 generic.go:334] "Generic (PLEG): container finished" podID="50057626-1b68-42b2-8f24-d6f4a8e13525" containerID="2f7a42fd8ceaf94e97b6cf43a970b48b8b2e7468dee05fd9dcc345a8233a919b" exitCode=143 Sep 30 13:56:58 crc kubenswrapper[4783]: I0930 13:56:58.115810 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"50057626-1b68-42b2-8f24-d6f4a8e13525","Type":"ContainerDied","Data":"2f7a42fd8ceaf94e97b6cf43a970b48b8b2e7468dee05fd9dcc345a8233a919b"} Sep 30 13:56:58 crc kubenswrapper[4783]: I0930 13:56:58.118576 4783 generic.go:334] "Generic (PLEG): container finished" podID="523d7b8c-32a8-4235-b665-b657176a8001" containerID="d40de3bbab8c0ae54ee3ede870b97a3dc0b58b8f8e8ce00329d52caf9cd21044" exitCode=0 Sep 30 13:56:58 crc kubenswrapper[4783]: I0930 13:56:58.118677 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-mlmst" event={"ID":"523d7b8c-32a8-4235-b665-b657176a8001","Type":"ContainerDied","Data":"d40de3bbab8c0ae54ee3ede870b97a3dc0b58b8f8e8ce00329d52caf9cd21044"} Sep 30 13:56:58 crc kubenswrapper[4783]: I0930 13:56:58.118793 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8d0efc6a-8957-4ef5-ae26-866fd64a3fbf" containerName="nova-scheduler-scheduler" 
containerID="cri-o://92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679" gracePeriod=30 Sep 30 13:56:59 crc kubenswrapper[4783]: I0930 13:56:59.531279 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:56:59 crc kubenswrapper[4783]: I0930 13:56:59.666257 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-scripts\") pod \"523d7b8c-32a8-4235-b665-b657176a8001\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " Sep 30 13:56:59 crc kubenswrapper[4783]: I0930 13:56:59.666328 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-config-data\") pod \"523d7b8c-32a8-4235-b665-b657176a8001\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " Sep 30 13:56:59 crc kubenswrapper[4783]: I0930 13:56:59.666356 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kf5r9\" (UniqueName: \"kubernetes.io/projected/523d7b8c-32a8-4235-b665-b657176a8001-kube-api-access-kf5r9\") pod \"523d7b8c-32a8-4235-b665-b657176a8001\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " Sep 30 13:56:59 crc kubenswrapper[4783]: I0930 13:56:59.666425 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-combined-ca-bundle\") pod \"523d7b8c-32a8-4235-b665-b657176a8001\" (UID: \"523d7b8c-32a8-4235-b665-b657176a8001\") " Sep 30 13:56:59 crc kubenswrapper[4783]: I0930 13:56:59.672809 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/523d7b8c-32a8-4235-b665-b657176a8001-kube-api-access-kf5r9" (OuterVolumeSpecName: "kube-api-access-kf5r9") pod "523d7b8c-32a8-4235-b665-b657176a8001" (UID: "523d7b8c-32a8-4235-b665-b657176a8001"). InnerVolumeSpecName "kube-api-access-kf5r9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:56:59 crc kubenswrapper[4783]: I0930 13:56:59.672920 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-scripts" (OuterVolumeSpecName: "scripts") pod "523d7b8c-32a8-4235-b665-b657176a8001" (UID: "523d7b8c-32a8-4235-b665-b657176a8001"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:59 crc kubenswrapper[4783]: I0930 13:56:59.697972 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "523d7b8c-32a8-4235-b665-b657176a8001" (UID: "523d7b8c-32a8-4235-b665-b657176a8001"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:59 crc kubenswrapper[4783]: I0930 13:56:59.705959 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-config-data" (OuterVolumeSpecName: "config-data") pod "523d7b8c-32a8-4235-b665-b657176a8001" (UID: "523d7b8c-32a8-4235-b665-b657176a8001"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:56:59 crc kubenswrapper[4783]: I0930 13:56:59.768441 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:59 crc kubenswrapper[4783]: I0930 13:56:59.768477 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:59 crc kubenswrapper[4783]: I0930 13:56:59.768490 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kf5r9\" (UniqueName: \"kubernetes.io/projected/523d7b8c-32a8-4235-b665-b657176a8001-kube-api-access-kf5r9\") on node \"crc\" DevicePath \"\"" Sep 30 13:56:59 crc kubenswrapper[4783]: I0930 13:56:59.768500 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/523d7b8c-32a8-4235-b665-b657176a8001-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.144299 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-mlmst" event={"ID":"523d7b8c-32a8-4235-b665-b657176a8001","Type":"ContainerDied","Data":"7c0f20fbe78696d70cddd83e21a22becfee03301aa03fa173dfef325c2f83aa1"} Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.144345 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c0f20fbe78696d70cddd83e21a22becfee03301aa03fa173dfef325c2f83aa1" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.144696 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-mlmst" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.247014 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 13:57:00 crc kubenswrapper[4783]: E0930 13:57:00.248177 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f29adc0-f647-4bb9-98fc-8124c7f30a2d" containerName="init" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.248316 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f29adc0-f647-4bb9-98fc-8124c7f30a2d" containerName="init" Sep 30 13:57:00 crc kubenswrapper[4783]: E0930 13:57:00.248431 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fe5df32-3b3d-486a-abe4-0e04c91c54c6" containerName="nova-manage" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.248533 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fe5df32-3b3d-486a-abe4-0e04c91c54c6" containerName="nova-manage" Sep 30 13:57:00 crc kubenswrapper[4783]: E0930 13:57:00.248671 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="523d7b8c-32a8-4235-b665-b657176a8001" containerName="nova-cell1-conductor-db-sync" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.248762 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="523d7b8c-32a8-4235-b665-b657176a8001" containerName="nova-cell1-conductor-db-sync" Sep 30 13:57:00 crc kubenswrapper[4783]: E0930 13:57:00.248849 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f29adc0-f647-4bb9-98fc-8124c7f30a2d" containerName="dnsmasq-dns" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.251570 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f29adc0-f647-4bb9-98fc-8124c7f30a2d" 
containerName="dnsmasq-dns" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.253976 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f29adc0-f647-4bb9-98fc-8124c7f30a2d" containerName="dnsmasq-dns" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.254023 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="523d7b8c-32a8-4235-b665-b657176a8001" containerName="nova-cell1-conductor-db-sync" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.254051 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fe5df32-3b3d-486a-abe4-0e04c91c54c6" containerName="nova-manage" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.255741 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.266788 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.277846 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 13:57:00 crc kubenswrapper[4783]: E0930 13:57:00.379019 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.380354 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvfw7\" (UniqueName: \"kubernetes.io/projected/563b20bf-7587-442c-86c5-1cbb179a2bf6-kube-api-access-dvfw7\") pod \"nova-cell1-conductor-0\" (UID: \"563b20bf-7587-442c-86c5-1cbb179a2bf6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 13:57:00 crc kubenswrapper[4783]: E0930 13:57:00.380422 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.380535 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/563b20bf-7587-442c-86c5-1cbb179a2bf6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"563b20bf-7587-442c-86c5-1cbb179a2bf6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.380679 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/563b20bf-7587-442c-86c5-1cbb179a2bf6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"563b20bf-7587-442c-86c5-1cbb179a2bf6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 13:57:00 crc kubenswrapper[4783]: E0930 13:57:00.382334 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 13:57:00 crc 
kubenswrapper[4783]: E0930 13:57:00.382434 4783 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="8d0efc6a-8957-4ef5-ae26-866fd64a3fbf" containerName="nova-scheduler-scheduler" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.482789 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/563b20bf-7587-442c-86c5-1cbb179a2bf6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"563b20bf-7587-442c-86c5-1cbb179a2bf6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.482862 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvfw7\" (UniqueName: \"kubernetes.io/projected/563b20bf-7587-442c-86c5-1cbb179a2bf6-kube-api-access-dvfw7\") pod \"nova-cell1-conductor-0\" (UID: \"563b20bf-7587-442c-86c5-1cbb179a2bf6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.482914 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/563b20bf-7587-442c-86c5-1cbb179a2bf6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"563b20bf-7587-442c-86c5-1cbb179a2bf6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.487710 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/563b20bf-7587-442c-86c5-1cbb179a2bf6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"563b20bf-7587-442c-86c5-1cbb179a2bf6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.488687 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/563b20bf-7587-442c-86c5-1cbb179a2bf6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"563b20bf-7587-442c-86c5-1cbb179a2bf6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.501625 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvfw7\" (UniqueName: \"kubernetes.io/projected/563b20bf-7587-442c-86c5-1cbb179a2bf6-kube-api-access-dvfw7\") pod \"nova-cell1-conductor-0\" (UID: \"563b20bf-7587-442c-86c5-1cbb179a2bf6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.615922 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 13:57:00 crc kubenswrapper[4783]: I0930 13:57:00.838821 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6c47bb5d77-b72ll" podUID="9f29adc0-f647-4bb9-98fc-8124c7f30a2d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.166:5353: i/o timeout" Sep 30 13:57:01 crc kubenswrapper[4783]: I0930 13:57:01.047050 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 13:57:01 crc kubenswrapper[4783]: I0930 13:57:01.156299 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"563b20bf-7587-442c-86c5-1cbb179a2bf6","Type":"ContainerStarted","Data":"355342254b8f408d1a01efe785631003dd502026b7bb55b8dd41f5156e76c055"} Sep 30 13:57:01 crc kubenswrapper[4783]: I0930 13:57:01.507546 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.017714 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.095001 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.118622 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50057626-1b68-42b2-8f24-d6f4a8e13525-config-data\") pod \"50057626-1b68-42b2-8f24-d6f4a8e13525\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.118656 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cpqv9\" (UniqueName: \"kubernetes.io/projected/50057626-1b68-42b2-8f24-d6f4a8e13525-kube-api-access-cpqv9\") pod \"50057626-1b68-42b2-8f24-d6f4a8e13525\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.118695 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50057626-1b68-42b2-8f24-d6f4a8e13525-combined-ca-bundle\") pod \"50057626-1b68-42b2-8f24-d6f4a8e13525\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.118772 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50057626-1b68-42b2-8f24-d6f4a8e13525-logs\") pod \"50057626-1b68-42b2-8f24-d6f4a8e13525\" (UID: \"50057626-1b68-42b2-8f24-d6f4a8e13525\") " Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.119668 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50057626-1b68-42b2-8f24-d6f4a8e13525-logs" (OuterVolumeSpecName: "logs") pod "50057626-1b68-42b2-8f24-d6f4a8e13525" (UID: "50057626-1b68-42b2-8f24-d6f4a8e13525"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.134924 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50057626-1b68-42b2-8f24-d6f4a8e13525-kube-api-access-cpqv9" (OuterVolumeSpecName: "kube-api-access-cpqv9") pod "50057626-1b68-42b2-8f24-d6f4a8e13525" (UID: "50057626-1b68-42b2-8f24-d6f4a8e13525"). 
InnerVolumeSpecName "kube-api-access-cpqv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.150121 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50057626-1b68-42b2-8f24-d6f4a8e13525-config-data" (OuterVolumeSpecName: "config-data") pod "50057626-1b68-42b2-8f24-d6f4a8e13525" (UID: "50057626-1b68-42b2-8f24-d6f4a8e13525"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.166266 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50057626-1b68-42b2-8f24-d6f4a8e13525-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50057626-1b68-42b2-8f24-d6f4a8e13525" (UID: "50057626-1b68-42b2-8f24-d6f4a8e13525"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.167510 4783 generic.go:334] "Generic (PLEG): container finished" podID="8d0efc6a-8957-4ef5-ae26-866fd64a3fbf" containerID="92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679" exitCode=0 Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.167617 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf","Type":"ContainerDied","Data":"92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679"} Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.167642 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf","Type":"ContainerDied","Data":"00e35931a1b61670f0905f043738e8a9194025059af5084bb775d5a1dc221d5f"} Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.167688 4783 scope.go:117] "RemoveContainer" containerID="92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.167792 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.176255 4783 generic.go:334] "Generic (PLEG): container finished" podID="50057626-1b68-42b2-8f24-d6f4a8e13525" containerID="1d23ce1df0a91c4369f663ec3f32c6e8e47e627220be507db99b21e8a7b08616" exitCode=0 Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.176296 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"50057626-1b68-42b2-8f24-d6f4a8e13525","Type":"ContainerDied","Data":"1d23ce1df0a91c4369f663ec3f32c6e8e47e627220be507db99b21e8a7b08616"} Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.176312 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"50057626-1b68-42b2-8f24-d6f4a8e13525","Type":"ContainerDied","Data":"6051fadac4c90e9e99f82540facd5de4093a2e5997a9f240be5fdcb6fccc71d1"} Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.176353 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.192095 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"563b20bf-7587-442c-86c5-1cbb179a2bf6","Type":"ContainerStarted","Data":"263f4dbca5cc00d91bb54ca88f80bb3b726ad47d96c37c7482687ea14abe3852"} Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.192188 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.193823 4783 scope.go:117] "RemoveContainer" containerID="92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679" Sep 30 13:57:02 crc kubenswrapper[4783]: E0930 13:57:02.194150 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679\": container with ID starting with 92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679 not found: ID does not exist" containerID="92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.194179 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679"} err="failed to get container status \"92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679\": rpc error: code = NotFound desc = could not find container \"92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679\": container with ID starting with 92f6286b80591576f8607c36d3fb7608e6c79b291d09aeefdf7ad88eda881679 not found: ID does not exist" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.194197 4783 scope.go:117] "RemoveContainer" containerID="1d23ce1df0a91c4369f663ec3f32c6e8e47e627220be507db99b21e8a7b08616" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.216575 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.216555695 podStartE2EDuration="2.216555695s" podCreationTimestamp="2025-09-30 13:57:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:57:02.214165358 +0000 UTC m=+1322.145631685" watchObservedRunningTime="2025-09-30 13:57:02.216555695 +0000 UTC m=+1322.148022002" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.224538 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-config-data\") pod \"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\" (UID: \"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\") " Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.225020 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kxh2\" (UniqueName: \"kubernetes.io/projected/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-kube-api-access-5kxh2\") pod \"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\" (UID: \"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\") " Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.225122 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-combined-ca-bundle\") pod \"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\" (UID: 
\"8d0efc6a-8957-4ef5-ae26-866fd64a3fbf\") " Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.226299 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50057626-1b68-42b2-8f24-d6f4a8e13525-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.226368 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cpqv9\" (UniqueName: \"kubernetes.io/projected/50057626-1b68-42b2-8f24-d6f4a8e13525-kube-api-access-cpqv9\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.226385 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50057626-1b68-42b2-8f24-d6f4a8e13525-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.226418 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50057626-1b68-42b2-8f24-d6f4a8e13525-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.229202 4783 scope.go:117] "RemoveContainer" containerID="2f7a42fd8ceaf94e97b6cf43a970b48b8b2e7468dee05fd9dcc345a8233a919b" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.233271 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-kube-api-access-5kxh2" (OuterVolumeSpecName: "kube-api-access-5kxh2") pod "8d0efc6a-8957-4ef5-ae26-866fd64a3fbf" (UID: "8d0efc6a-8957-4ef5-ae26-866fd64a3fbf"). InnerVolumeSpecName "kube-api-access-5kxh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.245769 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.257181 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.265198 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-config-data" (OuterVolumeSpecName: "config-data") pod "8d0efc6a-8957-4ef5-ae26-866fd64a3fbf" (UID: "8d0efc6a-8957-4ef5-ae26-866fd64a3fbf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.273279 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.274503 4783 scope.go:117] "RemoveContainer" containerID="1d23ce1df0a91c4369f663ec3f32c6e8e47e627220be507db99b21e8a7b08616" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.276390 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d0efc6a-8957-4ef5-ae26-866fd64a3fbf" (UID: "8d0efc6a-8957-4ef5-ae26-866fd64a3fbf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:02 crc kubenswrapper[4783]: E0930 13:57:02.276506 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50057626-1b68-42b2-8f24-d6f4a8e13525" containerName="nova-api-log" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.276525 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="50057626-1b68-42b2-8f24-d6f4a8e13525" containerName="nova-api-log" Sep 30 13:57:02 crc kubenswrapper[4783]: E0930 13:57:02.276539 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d0efc6a-8957-4ef5-ae26-866fd64a3fbf" containerName="nova-scheduler-scheduler" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.276563 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d0efc6a-8957-4ef5-ae26-866fd64a3fbf" containerName="nova-scheduler-scheduler" Sep 30 13:57:02 crc kubenswrapper[4783]: E0930 13:57:02.276578 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50057626-1b68-42b2-8f24-d6f4a8e13525" containerName="nova-api-api" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.276583 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="50057626-1b68-42b2-8f24-d6f4a8e13525" containerName="nova-api-api" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.276807 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="50057626-1b68-42b2-8f24-d6f4a8e13525" containerName="nova-api-api" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.276828 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d0efc6a-8957-4ef5-ae26-866fd64a3fbf" containerName="nova-scheduler-scheduler" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.276855 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="50057626-1b68-42b2-8f24-d6f4a8e13525" containerName="nova-api-log" Sep 30 13:57:02 crc kubenswrapper[4783]: E0930 13:57:02.276872 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d23ce1df0a91c4369f663ec3f32c6e8e47e627220be507db99b21e8a7b08616\": container with ID starting with 1d23ce1df0a91c4369f663ec3f32c6e8e47e627220be507db99b21e8a7b08616 not found: ID does not exist" containerID="1d23ce1df0a91c4369f663ec3f32c6e8e47e627220be507db99b21e8a7b08616" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.277029 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d23ce1df0a91c4369f663ec3f32c6e8e47e627220be507db99b21e8a7b08616"} err="failed to get container status \"1d23ce1df0a91c4369f663ec3f32c6e8e47e627220be507db99b21e8a7b08616\": rpc error: code = NotFound desc = could not find container \"1d23ce1df0a91c4369f663ec3f32c6e8e47e627220be507db99b21e8a7b08616\": container with ID starting with 1d23ce1df0a91c4369f663ec3f32c6e8e47e627220be507db99b21e8a7b08616 not found: ID does not exist" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.277060 4783 scope.go:117] "RemoveContainer" containerID="2f7a42fd8ceaf94e97b6cf43a970b48b8b2e7468dee05fd9dcc345a8233a919b" Sep 30 13:57:02 crc kubenswrapper[4783]: E0930 13:57:02.277306 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f7a42fd8ceaf94e97b6cf43a970b48b8b2e7468dee05fd9dcc345a8233a919b\": container with ID starting with 2f7a42fd8ceaf94e97b6cf43a970b48b8b2e7468dee05fd9dcc345a8233a919b not found: ID does not exist" 
containerID="2f7a42fd8ceaf94e97b6cf43a970b48b8b2e7468dee05fd9dcc345a8233a919b" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.277327 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f7a42fd8ceaf94e97b6cf43a970b48b8b2e7468dee05fd9dcc345a8233a919b"} err="failed to get container status \"2f7a42fd8ceaf94e97b6cf43a970b48b8b2e7468dee05fd9dcc345a8233a919b\": rpc error: code = NotFound desc = could not find container \"2f7a42fd8ceaf94e97b6cf43a970b48b8b2e7468dee05fd9dcc345a8233a919b\": container with ID starting with 2f7a42fd8ceaf94e97b6cf43a970b48b8b2e7468dee05fd9dcc345a8233a919b not found: ID does not exist" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.277940 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.280951 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.284376 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.328599 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kxh2\" (UniqueName: \"kubernetes.io/projected/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-kube-api-access-5kxh2\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.328631 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.328642 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.429758 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzxch\" (UniqueName: \"kubernetes.io/projected/6141ae2c-fd66-44ee-a430-33ba48bf8d27-kube-api-access-kzxch\") pod \"nova-api-0\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.429826 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6141ae2c-fd66-44ee-a430-33ba48bf8d27-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.430248 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6141ae2c-fd66-44ee-a430-33ba48bf8d27-logs\") pod \"nova-api-0\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.430341 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6141ae2c-fd66-44ee-a430-33ba48bf8d27-config-data\") pod \"nova-api-0\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.561651 4783 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/nova-scheduler-0"] Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.581308 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzxch\" (UniqueName: \"kubernetes.io/projected/6141ae2c-fd66-44ee-a430-33ba48bf8d27-kube-api-access-kzxch\") pod \"nova-api-0\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.581462 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6141ae2c-fd66-44ee-a430-33ba48bf8d27-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.581929 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6141ae2c-fd66-44ee-a430-33ba48bf8d27-logs\") pod \"nova-api-0\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.581982 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6141ae2c-fd66-44ee-a430-33ba48bf8d27-config-data\") pod \"nova-api-0\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.602654 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6141ae2c-fd66-44ee-a430-33ba48bf8d27-logs\") pod \"nova-api-0\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.606599 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.622863 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6141ae2c-fd66-44ee-a430-33ba48bf8d27-config-data\") pod \"nova-api-0\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.638045 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6141ae2c-fd66-44ee-a430-33ba48bf8d27-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.640688 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzxch\" (UniqueName: \"kubernetes.io/projected/6141ae2c-fd66-44ee-a430-33ba48bf8d27-kube-api-access-kzxch\") pod \"nova-api-0\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " pod="openstack/nova-api-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.644244 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.676420 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.682580 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.683929 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\") " pod="openstack/nova-scheduler-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.683975 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x8qx\" (UniqueName: \"kubernetes.io/projected/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-kube-api-access-6x8qx\") pod \"nova-scheduler-0\" (UID: \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\") " pod="openstack/nova-scheduler-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.684035 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-config-data\") pod \"nova-scheduler-0\" (UID: \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\") " pod="openstack/nova-scheduler-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.693624 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.786191 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\") " pod="openstack/nova-scheduler-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.786514 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x8qx\" (UniqueName: \"kubernetes.io/projected/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-kube-api-access-6x8qx\") pod \"nova-scheduler-0\" (UID: \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\") " pod="openstack/nova-scheduler-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.786711 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-config-data\") pod \"nova-scheduler-0\" (UID: \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\") " pod="openstack/nova-scheduler-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.789940 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\") " pod="openstack/nova-scheduler-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.790136 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-config-data\") pod \"nova-scheduler-0\" (UID: \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\") " pod="openstack/nova-scheduler-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.812657 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x8qx\" (UniqueName: 
\"kubernetes.io/projected/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-kube-api-access-6x8qx\") pod \"nova-scheduler-0\" (UID: \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\") " pod="openstack/nova-scheduler-0" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.853963 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50057626-1b68-42b2-8f24-d6f4a8e13525" path="/var/lib/kubelet/pods/50057626-1b68-42b2-8f24-d6f4a8e13525/volumes" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.854712 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d0efc6a-8957-4ef5-ae26-866fd64a3fbf" path="/var/lib/kubelet/pods/8d0efc6a-8957-4ef5-ae26-866fd64a3fbf/volumes" Sep 30 13:57:02 crc kubenswrapper[4783]: I0930 13:57:02.900497 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 13:57:03 crc kubenswrapper[4783]: I0930 13:57:03.022944 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 13:57:03 crc kubenswrapper[4783]: I0930 13:57:03.391068 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:57:03 crc kubenswrapper[4783]: I0930 13:57:03.487696 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 13:57:04 crc kubenswrapper[4783]: I0930 13:57:04.216024 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5a4f929b-b804-451c-9dd4-c9e2420b2c0b","Type":"ContainerStarted","Data":"18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628"} Sep 30 13:57:04 crc kubenswrapper[4783]: I0930 13:57:04.216446 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5a4f929b-b804-451c-9dd4-c9e2420b2c0b","Type":"ContainerStarted","Data":"57f80f61e091ac2dfe95e3763b404635b161d9ed34844d4dbc21c73bf7c2ad18"} Sep 30 13:57:04 crc kubenswrapper[4783]: I0930 13:57:04.220264 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6141ae2c-fd66-44ee-a430-33ba48bf8d27","Type":"ContainerStarted","Data":"50ffda0651fa5193affd858ba492eaf2639a8a1a6a772a4d676d79d3f22377d0"} Sep 30 13:57:04 crc kubenswrapper[4783]: I0930 13:57:04.220317 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6141ae2c-fd66-44ee-a430-33ba48bf8d27","Type":"ContainerStarted","Data":"5aecba9461943d65997cc0dccc81c1904ac9cf1065d1d7d3220f4ce43ea1fd2e"} Sep 30 13:57:04 crc kubenswrapper[4783]: I0930 13:57:04.220333 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6141ae2c-fd66-44ee-a430-33ba48bf8d27","Type":"ContainerStarted","Data":"53a50e93de3dc265e819d5b1fc7b5868a125a62a05c06d9b30e564fbcebb60a4"} Sep 30 13:57:04 crc kubenswrapper[4783]: I0930 13:57:04.239883 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.239863177 podStartE2EDuration="2.239863177s" podCreationTimestamp="2025-09-30 13:57:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:57:04.232989517 +0000 UTC m=+1324.164455844" watchObservedRunningTime="2025-09-30 13:57:04.239863177 +0000 UTC m=+1324.171329484" Sep 30 13:57:04 crc kubenswrapper[4783]: I0930 13:57:04.251080 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" 
podStartSLOduration=2.251046865 podStartE2EDuration="2.251046865s" podCreationTimestamp="2025-09-30 13:57:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:57:04.250061503 +0000 UTC m=+1324.181527810" watchObservedRunningTime="2025-09-30 13:57:04.251046865 +0000 UTC m=+1324.182513172" Sep 30 13:57:05 crc kubenswrapper[4783]: I0930 13:57:05.430266 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 13:57:05 crc kubenswrapper[4783]: I0930 13:57:05.430652 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="7732b547-1797-4164-ad03-6c76c2c1f207" containerName="kube-state-metrics" containerID="cri-o://12ae70be391cbc8dcb6e2645d25ed4334ed8a24aa2ed55ba65a7d836ad84b67e" gracePeriod=30 Sep 30 13:57:05 crc kubenswrapper[4783]: I0930 13:57:05.909735 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.041616 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grks2\" (UniqueName: \"kubernetes.io/projected/7732b547-1797-4164-ad03-6c76c2c1f207-kube-api-access-grks2\") pod \"7732b547-1797-4164-ad03-6c76c2c1f207\" (UID: \"7732b547-1797-4164-ad03-6c76c2c1f207\") " Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.048608 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7732b547-1797-4164-ad03-6c76c2c1f207-kube-api-access-grks2" (OuterVolumeSpecName: "kube-api-access-grks2") pod "7732b547-1797-4164-ad03-6c76c2c1f207" (UID: "7732b547-1797-4164-ad03-6c76c2c1f207"). InnerVolumeSpecName "kube-api-access-grks2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.144612 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grks2\" (UniqueName: \"kubernetes.io/projected/7732b547-1797-4164-ad03-6c76c2c1f207-kube-api-access-grks2\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.240460 4783 generic.go:334] "Generic (PLEG): container finished" podID="7732b547-1797-4164-ad03-6c76c2c1f207" containerID="12ae70be391cbc8dcb6e2645d25ed4334ed8a24aa2ed55ba65a7d836ad84b67e" exitCode=2 Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.240512 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.240518 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7732b547-1797-4164-ad03-6c76c2c1f207","Type":"ContainerDied","Data":"12ae70be391cbc8dcb6e2645d25ed4334ed8a24aa2ed55ba65a7d836ad84b67e"} Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.240599 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7732b547-1797-4164-ad03-6c76c2c1f207","Type":"ContainerDied","Data":"f1ca784a1b258d08929882e0ec37237510d87717ca2a8c2b5c4059c9fefaefd9"} Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.240662 4783 scope.go:117] "RemoveContainer" containerID="12ae70be391cbc8dcb6e2645d25ed4334ed8a24aa2ed55ba65a7d836ad84b67e" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.284954 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.293799 4783 scope.go:117] "RemoveContainer" containerID="12ae70be391cbc8dcb6e2645d25ed4334ed8a24aa2ed55ba65a7d836ad84b67e" Sep 30 13:57:06 crc kubenswrapper[4783]: E0930 13:57:06.296400 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12ae70be391cbc8dcb6e2645d25ed4334ed8a24aa2ed55ba65a7d836ad84b67e\": container with ID starting with 12ae70be391cbc8dcb6e2645d25ed4334ed8a24aa2ed55ba65a7d836ad84b67e not found: ID does not exist" containerID="12ae70be391cbc8dcb6e2645d25ed4334ed8a24aa2ed55ba65a7d836ad84b67e" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.296453 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12ae70be391cbc8dcb6e2645d25ed4334ed8a24aa2ed55ba65a7d836ad84b67e"} err="failed to get container status \"12ae70be391cbc8dcb6e2645d25ed4334ed8a24aa2ed55ba65a7d836ad84b67e\": rpc error: code = NotFound desc = could not find container \"12ae70be391cbc8dcb6e2645d25ed4334ed8a24aa2ed55ba65a7d836ad84b67e\": container with ID starting with 12ae70be391cbc8dcb6e2645d25ed4334ed8a24aa2ed55ba65a7d836ad84b67e not found: ID does not exist" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.305040 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.320092 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 13:57:06 crc kubenswrapper[4783]: E0930 13:57:06.320563 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7732b547-1797-4164-ad03-6c76c2c1f207" containerName="kube-state-metrics" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.320584 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7732b547-1797-4164-ad03-6c76c2c1f207" containerName="kube-state-metrics" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.320827 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7732b547-1797-4164-ad03-6c76c2c1f207" containerName="kube-state-metrics" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.321681 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.326084 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.326335 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.330711 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.452494 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pp4s\" (UniqueName: \"kubernetes.io/projected/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-api-access-9pp4s\") pod \"kube-state-metrics-0\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.452569 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.452592 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.452678 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.553879 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.554000 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pp4s\" (UniqueName: \"kubernetes.io/projected/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-api-access-9pp4s\") pod \"kube-state-metrics-0\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.554032 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.554054 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.559973 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.560096 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.567637 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.590270 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pp4s\" (UniqueName: \"kubernetes.io/projected/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-api-access-9pp4s\") pod \"kube-state-metrics-0\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.642817 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 13:57:06 crc kubenswrapper[4783]: I0930 13:57:06.865159 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7732b547-1797-4164-ad03-6c76c2c1f207" path="/var/lib/kubelet/pods/7732b547-1797-4164-ad03-6c76c2c1f207/volumes" Sep 30 13:57:07 crc kubenswrapper[4783]: I0930 13:57:07.111096 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 13:57:07 crc kubenswrapper[4783]: I0930 13:57:07.207483 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:57:07 crc kubenswrapper[4783]: I0930 13:57:07.207769 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="ceilometer-central-agent" containerID="cri-o://54661fa5491fd16ae834e43ce4d28ec1d857ce9b14702f8bd4ac285c5ab828f0" gracePeriod=30 Sep 30 13:57:07 crc kubenswrapper[4783]: I0930 13:57:07.207838 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="proxy-httpd" containerID="cri-o://59df6d2fd0fc5c4f7323c113df6b47338d461f4efa5f9dce24af42a72d443fe0" gracePeriod=30 Sep 30 13:57:07 crc kubenswrapper[4783]: I0930 13:57:07.207880 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="ceilometer-notification-agent" containerID="cri-o://7ca69fbe1567759db94a87e797c95b12e88682df905c30a0602c6c57d4051165" gracePeriod=30 Sep 30 13:57:07 crc kubenswrapper[4783]: I0930 13:57:07.207832 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="sg-core" containerID="cri-o://d2a528d9d102943b1ca191d8efe67b812216952209bf66bddcaad37773d7b6fe" gracePeriod=30 Sep 30 13:57:07 crc kubenswrapper[4783]: I0930 13:57:07.263455 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f381dd59-999a-4cd2-8dc1-d0faea63df2c","Type":"ContainerStarted","Data":"c51e5dfa9f8eeb3a45ac6098eb9eb5a9817b6073ae4994d0e9dda1f3bda7900f"} Sep 30 13:57:08 crc kubenswrapper[4783]: I0930 13:57:08.023874 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 13:57:08 crc kubenswrapper[4783]: I0930 13:57:08.279265 4783 generic.go:334] "Generic (PLEG): container finished" podID="d104d752-45e0-4a40-94e0-12366cbd1327" containerID="59df6d2fd0fc5c4f7323c113df6b47338d461f4efa5f9dce24af42a72d443fe0" exitCode=0 Sep 30 13:57:08 crc kubenswrapper[4783]: I0930 13:57:08.279318 4783 generic.go:334] "Generic (PLEG): container finished" podID="d104d752-45e0-4a40-94e0-12366cbd1327" containerID="d2a528d9d102943b1ca191d8efe67b812216952209bf66bddcaad37773d7b6fe" exitCode=2 Sep 30 13:57:08 crc kubenswrapper[4783]: I0930 13:57:08.279320 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d104d752-45e0-4a40-94e0-12366cbd1327","Type":"ContainerDied","Data":"59df6d2fd0fc5c4f7323c113df6b47338d461f4efa5f9dce24af42a72d443fe0"} Sep 30 13:57:08 crc kubenswrapper[4783]: I0930 13:57:08.279374 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"d104d752-45e0-4a40-94e0-12366cbd1327","Type":"ContainerDied","Data":"d2a528d9d102943b1ca191d8efe67b812216952209bf66bddcaad37773d7b6fe"} Sep 30 13:57:08 crc kubenswrapper[4783]: I0930 13:57:08.279390 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d104d752-45e0-4a40-94e0-12366cbd1327","Type":"ContainerDied","Data":"54661fa5491fd16ae834e43ce4d28ec1d857ce9b14702f8bd4ac285c5ab828f0"} Sep 30 13:57:08 crc kubenswrapper[4783]: I0930 13:57:08.279334 4783 generic.go:334] "Generic (PLEG): container finished" podID="d104d752-45e0-4a40-94e0-12366cbd1327" containerID="54661fa5491fd16ae834e43ce4d28ec1d857ce9b14702f8bd4ac285c5ab828f0" exitCode=0 Sep 30 13:57:09 crc kubenswrapper[4783]: I0930 13:57:09.291730 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f381dd59-999a-4cd2-8dc1-d0faea63df2c","Type":"ContainerStarted","Data":"1a2d3e8f3026d5fdf54bd68bc0e37a6babc472452cfbc73d9d47792a666e28be"} Sep 30 13:57:09 crc kubenswrapper[4783]: I0930 13:57:09.293025 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 13:57:09 crc kubenswrapper[4783]: I0930 13:57:09.310099 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.467557707 podStartE2EDuration="3.310080654s" podCreationTimestamp="2025-09-30 13:57:06 +0000 UTC" firstStartedPulling="2025-09-30 13:57:07.117527995 +0000 UTC m=+1327.048994302" lastFinishedPulling="2025-09-30 13:57:07.960050922 +0000 UTC m=+1327.891517249" observedRunningTime="2025-09-30 13:57:09.307729068 +0000 UTC m=+1329.239195395" watchObservedRunningTime="2025-09-30 13:57:09.310080654 +0000 UTC m=+1329.241546981" Sep 30 13:57:10 crc kubenswrapper[4783]: I0930 13:57:10.651241 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Sep 30 13:57:12 crc kubenswrapper[4783]: I0930 13:57:12.900988 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 13:57:12 crc kubenswrapper[4783]: I0930 13:57:12.901364 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 13:57:13 crc kubenswrapper[4783]: I0930 13:57:13.023986 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 13:57:13 crc kubenswrapper[4783]: I0930 13:57:13.055392 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 13:57:13 crc kubenswrapper[4783]: I0930 13:57:13.367208 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 13:57:13 crc kubenswrapper[4783]: I0930 13:57:13.984474 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6141ae2c-fd66-44ee-a430-33ba48bf8d27" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 13:57:13 crc kubenswrapper[4783]: I0930 13:57:13.985215 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6141ae2c-fd66-44ee-a430-33ba48bf8d27" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 13:57:16 
crc kubenswrapper[4783]: I0930 13:57:16.659047 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.001371 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.147026 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d104d752-45e0-4a40-94e0-12366cbd1327-run-httpd\") pod \"d104d752-45e0-4a40-94e0-12366cbd1327\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.147076 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-sg-core-conf-yaml\") pod \"d104d752-45e0-4a40-94e0-12366cbd1327\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.147140 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-scripts\") pod \"d104d752-45e0-4a40-94e0-12366cbd1327\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.147164 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d104d752-45e0-4a40-94e0-12366cbd1327-log-httpd\") pod \"d104d752-45e0-4a40-94e0-12366cbd1327\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.147194 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-combined-ca-bundle\") pod \"d104d752-45e0-4a40-94e0-12366cbd1327\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.147562 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d104d752-45e0-4a40-94e0-12366cbd1327-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d104d752-45e0-4a40-94e0-12366cbd1327" (UID: "d104d752-45e0-4a40-94e0-12366cbd1327"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.147649 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d104d752-45e0-4a40-94e0-12366cbd1327-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d104d752-45e0-4a40-94e0-12366cbd1327" (UID: "d104d752-45e0-4a40-94e0-12366cbd1327"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.147700 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2lhl\" (UniqueName: \"kubernetes.io/projected/d104d752-45e0-4a40-94e0-12366cbd1327-kube-api-access-b2lhl\") pod \"d104d752-45e0-4a40-94e0-12366cbd1327\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.147738 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-config-data\") pod \"d104d752-45e0-4a40-94e0-12366cbd1327\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.148456 4783 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d104d752-45e0-4a40-94e0-12366cbd1327-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.148472 4783 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d104d752-45e0-4a40-94e0-12366cbd1327-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.153290 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d104d752-45e0-4a40-94e0-12366cbd1327-kube-api-access-b2lhl" (OuterVolumeSpecName: "kube-api-access-b2lhl") pod "d104d752-45e0-4a40-94e0-12366cbd1327" (UID: "d104d752-45e0-4a40-94e0-12366cbd1327"). InnerVolumeSpecName "kube-api-access-b2lhl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.153506 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-scripts" (OuterVolumeSpecName: "scripts") pod "d104d752-45e0-4a40-94e0-12366cbd1327" (UID: "d104d752-45e0-4a40-94e0-12366cbd1327"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.174838 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d104d752-45e0-4a40-94e0-12366cbd1327" (UID: "d104d752-45e0-4a40-94e0-12366cbd1327"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.242313 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d104d752-45e0-4a40-94e0-12366cbd1327" (UID: "d104d752-45e0-4a40-94e0-12366cbd1327"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.249511 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-config-data" (OuterVolumeSpecName: "config-data") pod "d104d752-45e0-4a40-94e0-12366cbd1327" (UID: "d104d752-45e0-4a40-94e0-12366cbd1327"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.250121 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-config-data\") pod \"d104d752-45e0-4a40-94e0-12366cbd1327\" (UID: \"d104d752-45e0-4a40-94e0-12366cbd1327\") " Sep 30 13:57:17 crc kubenswrapper[4783]: W0930 13:57:17.250362 4783 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/d104d752-45e0-4a40-94e0-12366cbd1327/volumes/kubernetes.io~secret/config-data Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.250394 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-config-data" (OuterVolumeSpecName: "config-data") pod "d104d752-45e0-4a40-94e0-12366cbd1327" (UID: "d104d752-45e0-4a40-94e0-12366cbd1327"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.250932 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.250949 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.250962 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2lhl\" (UniqueName: \"kubernetes.io/projected/d104d752-45e0-4a40-94e0-12366cbd1327-kube-api-access-b2lhl\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.250971 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.250982 4783 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d104d752-45e0-4a40-94e0-12366cbd1327-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.375270 4783 generic.go:334] "Generic (PLEG): container finished" podID="d104d752-45e0-4a40-94e0-12366cbd1327" containerID="7ca69fbe1567759db94a87e797c95b12e88682df905c30a0602c6c57d4051165" exitCode=0 Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.375311 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d104d752-45e0-4a40-94e0-12366cbd1327","Type":"ContainerDied","Data":"7ca69fbe1567759db94a87e797c95b12e88682df905c30a0602c6c57d4051165"} Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.375348 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d104d752-45e0-4a40-94e0-12366cbd1327","Type":"ContainerDied","Data":"fc20c88679e764b942adb45e7d73289fbd19f49ad60261bd693a14e0e07630a1"} Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.375365 4783 scope.go:117] "RemoveContainer" containerID="59df6d2fd0fc5c4f7323c113df6b47338d461f4efa5f9dce24af42a72d443fe0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.375425 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.395825 4783 scope.go:117] "RemoveContainer" containerID="d2a528d9d102943b1ca191d8efe67b812216952209bf66bddcaad37773d7b6fe" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.421935 4783 scope.go:117] "RemoveContainer" containerID="7ca69fbe1567759db94a87e797c95b12e88682df905c30a0602c6c57d4051165" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.440043 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.453250 4783 scope.go:117] "RemoveContainer" containerID="54661fa5491fd16ae834e43ce4d28ec1d857ce9b14702f8bd4ac285c5ab828f0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.454512 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.465673 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:57:17 crc kubenswrapper[4783]: E0930 13:57:17.466147 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="sg-core" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.466169 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="sg-core" Sep 30 13:57:17 crc kubenswrapper[4783]: E0930 13:57:17.466188 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="proxy-httpd" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.466197 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="proxy-httpd" Sep 30 13:57:17 crc kubenswrapper[4783]: E0930 13:57:17.466234 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="ceilometer-notification-agent" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.466244 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="ceilometer-notification-agent" Sep 30 13:57:17 crc kubenswrapper[4783]: E0930 13:57:17.466497 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="ceilometer-central-agent" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.466507 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="ceilometer-central-agent" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.466749 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="sg-core" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.466778 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="ceilometer-notification-agent" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.466800 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="ceilometer-central-agent" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.466824 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" containerName="proxy-httpd" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.469136 4783 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.471878 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.472180 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.472253 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.479328 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.479578 4783 scope.go:117] "RemoveContainer" containerID="59df6d2fd0fc5c4f7323c113df6b47338d461f4efa5f9dce24af42a72d443fe0" Sep 30 13:57:17 crc kubenswrapper[4783]: E0930 13:57:17.480358 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59df6d2fd0fc5c4f7323c113df6b47338d461f4efa5f9dce24af42a72d443fe0\": container with ID starting with 59df6d2fd0fc5c4f7323c113df6b47338d461f4efa5f9dce24af42a72d443fe0 not found: ID does not exist" containerID="59df6d2fd0fc5c4f7323c113df6b47338d461f4efa5f9dce24af42a72d443fe0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.480416 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59df6d2fd0fc5c4f7323c113df6b47338d461f4efa5f9dce24af42a72d443fe0"} err="failed to get container status \"59df6d2fd0fc5c4f7323c113df6b47338d461f4efa5f9dce24af42a72d443fe0\": rpc error: code = NotFound desc = could not find container \"59df6d2fd0fc5c4f7323c113df6b47338d461f4efa5f9dce24af42a72d443fe0\": container with ID starting with 59df6d2fd0fc5c4f7323c113df6b47338d461f4efa5f9dce24af42a72d443fe0 not found: ID does not exist" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.480440 4783 scope.go:117] "RemoveContainer" containerID="d2a528d9d102943b1ca191d8efe67b812216952209bf66bddcaad37773d7b6fe" Sep 30 13:57:17 crc kubenswrapper[4783]: E0930 13:57:17.480927 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2a528d9d102943b1ca191d8efe67b812216952209bf66bddcaad37773d7b6fe\": container with ID starting with d2a528d9d102943b1ca191d8efe67b812216952209bf66bddcaad37773d7b6fe not found: ID does not exist" containerID="d2a528d9d102943b1ca191d8efe67b812216952209bf66bddcaad37773d7b6fe" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.480990 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2a528d9d102943b1ca191d8efe67b812216952209bf66bddcaad37773d7b6fe"} err="failed to get container status \"d2a528d9d102943b1ca191d8efe67b812216952209bf66bddcaad37773d7b6fe\": rpc error: code = NotFound desc = could not find container \"d2a528d9d102943b1ca191d8efe67b812216952209bf66bddcaad37773d7b6fe\": container with ID starting with d2a528d9d102943b1ca191d8efe67b812216952209bf66bddcaad37773d7b6fe not found: ID does not exist" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.481060 4783 scope.go:117] "RemoveContainer" containerID="7ca69fbe1567759db94a87e797c95b12e88682df905c30a0602c6c57d4051165" Sep 30 13:57:17 crc kubenswrapper[4783]: E0930 13:57:17.481389 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"7ca69fbe1567759db94a87e797c95b12e88682df905c30a0602c6c57d4051165\": container with ID starting with 7ca69fbe1567759db94a87e797c95b12e88682df905c30a0602c6c57d4051165 not found: ID does not exist" containerID="7ca69fbe1567759db94a87e797c95b12e88682df905c30a0602c6c57d4051165" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.481419 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ca69fbe1567759db94a87e797c95b12e88682df905c30a0602c6c57d4051165"} err="failed to get container status \"7ca69fbe1567759db94a87e797c95b12e88682df905c30a0602c6c57d4051165\": rpc error: code = NotFound desc = could not find container \"7ca69fbe1567759db94a87e797c95b12e88682df905c30a0602c6c57d4051165\": container with ID starting with 7ca69fbe1567759db94a87e797c95b12e88682df905c30a0602c6c57d4051165 not found: ID does not exist" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.481441 4783 scope.go:117] "RemoveContainer" containerID="54661fa5491fd16ae834e43ce4d28ec1d857ce9b14702f8bd4ac285c5ab828f0" Sep 30 13:57:17 crc kubenswrapper[4783]: E0930 13:57:17.481668 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54661fa5491fd16ae834e43ce4d28ec1d857ce9b14702f8bd4ac285c5ab828f0\": container with ID starting with 54661fa5491fd16ae834e43ce4d28ec1d857ce9b14702f8bd4ac285c5ab828f0 not found: ID does not exist" containerID="54661fa5491fd16ae834e43ce4d28ec1d857ce9b14702f8bd4ac285c5ab828f0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.481687 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54661fa5491fd16ae834e43ce4d28ec1d857ce9b14702f8bd4ac285c5ab828f0"} err="failed to get container status \"54661fa5491fd16ae834e43ce4d28ec1d857ce9b14702f8bd4ac285c5ab828f0\": rpc error: code = NotFound desc = could not find container \"54661fa5491fd16ae834e43ce4d28ec1d857ce9b14702f8bd4ac285c5ab828f0\": container with ID starting with 54661fa5491fd16ae834e43ce4d28ec1d857ce9b14702f8bd4ac285c5ab828f0 not found: ID does not exist" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.556550 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-run-httpd\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.556614 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-scripts\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.556635 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-config-data\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.556769 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.556794 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g8gx\" (UniqueName: \"kubernetes.io/projected/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-kube-api-access-6g8gx\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.556812 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.556838 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-log-httpd\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.556856 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.658000 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.658043 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g8gx\" (UniqueName: \"kubernetes.io/projected/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-kube-api-access-6g8gx\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.658070 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.658104 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-log-httpd\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.658130 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.658240 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-run-httpd\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.658286 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-scripts\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.658312 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-config-data\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.658956 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-log-httpd\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.659155 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-run-httpd\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.662712 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.663370 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-config-data\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.664427 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-scripts\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.664610 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.665023 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.674014 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6g8gx\" (UniqueName: 
\"kubernetes.io/projected/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-kube-api-access-6g8gx\") pod \"ceilometer-0\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " pod="openstack/ceilometer-0" Sep 30 13:57:17 crc kubenswrapper[4783]: I0930 13:57:17.800061 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:57:18 crc kubenswrapper[4783]: I0930 13:57:18.284632 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:57:18 crc kubenswrapper[4783]: I0930 13:57:18.387017 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8","Type":"ContainerStarted","Data":"c894568d974cc0b881707fbb8dfcff712d5aa1ef2a8732d8b750b99ba1829f4d"} Sep 30 13:57:18 crc kubenswrapper[4783]: I0930 13:57:18.854686 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d104d752-45e0-4a40-94e0-12366cbd1327" path="/var/lib/kubelet/pods/d104d752-45e0-4a40-94e0-12366cbd1327/volumes" Sep 30 13:57:20 crc kubenswrapper[4783]: E0930 13:57:20.377945 4783 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f2f282a_74c3_4823_a2cb_7c002d18eedb.slice/crio-conmon-b943f935378c46657faf26337ffbeab32303c97f34be6ae4d529223a08f15572.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfec8c6a1_7435_4f3e_b77b_50fe20220c2a.slice/crio-conmon-73cf2228317a8d20ed34573779cec6df5113380b476db226cf7b16fb89423412.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f2f282a_74c3_4823_a2cb_7c002d18eedb.slice/crio-b943f935378c46657faf26337ffbeab32303c97f34be6ae4d529223a08f15572.scope\": RecentStats: unable to find data in memory cache]" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.415480 4783 generic.go:334] "Generic (PLEG): container finished" podID="5f2f282a-74c3-4823-a2cb-7c002d18eedb" containerID="b943f935378c46657faf26337ffbeab32303c97f34be6ae4d529223a08f15572" exitCode=137 Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.415567 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5f2f282a-74c3-4823-a2cb-7c002d18eedb","Type":"ContainerDied","Data":"b943f935378c46657faf26337ffbeab32303c97f34be6ae4d529223a08f15572"} Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.417382 4783 generic.go:334] "Generic (PLEG): container finished" podID="fec8c6a1-7435-4f3e-b77b-50fe20220c2a" containerID="73cf2228317a8d20ed34573779cec6df5113380b476db226cf7b16fb89423412" exitCode=137 Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.417428 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fec8c6a1-7435-4f3e-b77b-50fe20220c2a","Type":"ContainerDied","Data":"73cf2228317a8d20ed34573779cec6df5113380b476db226cf7b16fb89423412"} Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.417448 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fec8c6a1-7435-4f3e-b77b-50fe20220c2a","Type":"ContainerDied","Data":"3db5dbfa3106c115a71b0cc72d738118c755ced7f2a3917604a28a2e53480152"} Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.417460 4783 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="3db5dbfa3106c115a71b0cc72d738118c755ced7f2a3917604a28a2e53480152" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.418835 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8","Type":"ContainerStarted","Data":"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902"} Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.469261 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.528930 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.624400 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxs9j\" (UniqueName: \"kubernetes.io/projected/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-kube-api-access-jxs9j\") pod \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.624523 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-config-data\") pod \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.624601 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-combined-ca-bundle\") pod \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.624631 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-logs\") pod \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\" (UID: \"fec8c6a1-7435-4f3e-b77b-50fe20220c2a\") " Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.624664 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f2f282a-74c3-4823-a2cb-7c002d18eedb-combined-ca-bundle\") pod \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\" (UID: \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\") " Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.624906 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-logs" (OuterVolumeSpecName: "logs") pod "fec8c6a1-7435-4f3e-b77b-50fe20220c2a" (UID: "fec8c6a1-7435-4f3e-b77b-50fe20220c2a"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.626784 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27vmq\" (UniqueName: \"kubernetes.io/projected/5f2f282a-74c3-4823-a2cb-7c002d18eedb-kube-api-access-27vmq\") pod \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\" (UID: \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\") " Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.626916 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f2f282a-74c3-4823-a2cb-7c002d18eedb-config-data\") pod \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\" (UID: \"5f2f282a-74c3-4823-a2cb-7c002d18eedb\") " Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.627540 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.630967 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f2f282a-74c3-4823-a2cb-7c002d18eedb-kube-api-access-27vmq" (OuterVolumeSpecName: "kube-api-access-27vmq") pod "5f2f282a-74c3-4823-a2cb-7c002d18eedb" (UID: "5f2f282a-74c3-4823-a2cb-7c002d18eedb"). InnerVolumeSpecName "kube-api-access-27vmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.634377 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-kube-api-access-jxs9j" (OuterVolumeSpecName: "kube-api-access-jxs9j") pod "fec8c6a1-7435-4f3e-b77b-50fe20220c2a" (UID: "fec8c6a1-7435-4f3e-b77b-50fe20220c2a"). InnerVolumeSpecName "kube-api-access-jxs9j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.658854 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-config-data" (OuterVolumeSpecName: "config-data") pod "fec8c6a1-7435-4f3e-b77b-50fe20220c2a" (UID: "fec8c6a1-7435-4f3e-b77b-50fe20220c2a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.659745 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f2f282a-74c3-4823-a2cb-7c002d18eedb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f2f282a-74c3-4823-a2cb-7c002d18eedb" (UID: "5f2f282a-74c3-4823-a2cb-7c002d18eedb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.664817 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fec8c6a1-7435-4f3e-b77b-50fe20220c2a" (UID: "fec8c6a1-7435-4f3e-b77b-50fe20220c2a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.669765 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f2f282a-74c3-4823-a2cb-7c002d18eedb-config-data" (OuterVolumeSpecName: "config-data") pod "5f2f282a-74c3-4823-a2cb-7c002d18eedb" (UID: "5f2f282a-74c3-4823-a2cb-7c002d18eedb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.729014 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.729054 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f2f282a-74c3-4823-a2cb-7c002d18eedb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.729068 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27vmq\" (UniqueName: \"kubernetes.io/projected/5f2f282a-74c3-4823-a2cb-7c002d18eedb-kube-api-access-27vmq\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.729083 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f2f282a-74c3-4823-a2cb-7c002d18eedb-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.729096 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxs9j\" (UniqueName: \"kubernetes.io/projected/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-kube-api-access-jxs9j\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:20 crc kubenswrapper[4783]: I0930 13:57:20.729107 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fec8c6a1-7435-4f3e-b77b-50fe20220c2a-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.442777 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5f2f282a-74c3-4823-a2cb-7c002d18eedb","Type":"ContainerDied","Data":"aaa549b6b271542c831a0317352698f5fb6c91f3ba447ba08bbaf3cdeaff8512"} Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.443090 4783 scope.go:117] "RemoveContainer" containerID="b943f935378c46657faf26337ffbeab32303c97f34be6ae4d529223a08f15572" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.442874 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.447850 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.448420 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8","Type":"ContainerStarted","Data":"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5"} Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.542680 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.556486 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.590006 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.597901 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.606023 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 13:57:21 crc kubenswrapper[4783]: E0930 13:57:21.606541 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f2f282a-74c3-4823-a2cb-7c002d18eedb" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.606566 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f2f282a-74c3-4823-a2cb-7c002d18eedb" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 13:57:21 crc kubenswrapper[4783]: E0930 13:57:21.606582 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fec8c6a1-7435-4f3e-b77b-50fe20220c2a" containerName="nova-metadata-metadata" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.606591 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fec8c6a1-7435-4f3e-b77b-50fe20220c2a" containerName="nova-metadata-metadata" Sep 30 13:57:21 crc kubenswrapper[4783]: E0930 13:57:21.606623 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fec8c6a1-7435-4f3e-b77b-50fe20220c2a" containerName="nova-metadata-log" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.606633 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fec8c6a1-7435-4f3e-b77b-50fe20220c2a" containerName="nova-metadata-log" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.606872 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="fec8c6a1-7435-4f3e-b77b-50fe20220c2a" containerName="nova-metadata-metadata" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.606900 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f2f282a-74c3-4823-a2cb-7c002d18eedb" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.606917 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="fec8c6a1-7435-4f3e-b77b-50fe20220c2a" containerName="nova-metadata-log" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.607695 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.614289 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.614899 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.614906 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.615026 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.620975 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.621066 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.625366 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.625777 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.630170 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.751108 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.751196 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.751247 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.751277 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.751300 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tc8d7\" (UniqueName: \"kubernetes.io/projected/5f3f45e8-14eb-423b-8aab-668a84f21c9e-kube-api-access-tc8d7\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " 
pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.751325 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-config-data\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.751346 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.751570 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f3f45e8-14eb-423b-8aab-668a84f21c9e-logs\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.751634 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.751755 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjvss\" (UniqueName: \"kubernetes.io/projected/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-kube-api-access-qjvss\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.853269 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjvss\" (UniqueName: \"kubernetes.io/projected/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-kube-api-access-qjvss\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.853414 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.853480 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.853524 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: 
I0930 13:57:21.853545 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.853570 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tc8d7\" (UniqueName: \"kubernetes.io/projected/5f3f45e8-14eb-423b-8aab-668a84f21c9e-kube-api-access-tc8d7\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.853590 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-config-data\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.853609 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.853661 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f3f45e8-14eb-423b-8aab-668a84f21c9e-logs\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.853678 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.854099 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f3f45e8-14eb-423b-8aab-668a84f21c9e-logs\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.861749 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.862513 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.864251 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") 
" pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.864740 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.864854 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.865519 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-config-data\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.865936 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.883860 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjvss\" (UniqueName: \"kubernetes.io/projected/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-kube-api-access-qjvss\") pod \"nova-cell1-novncproxy-0\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.887906 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tc8d7\" (UniqueName: \"kubernetes.io/projected/5f3f45e8-14eb-423b-8aab-668a84f21c9e-kube-api-access-tc8d7\") pod \"nova-metadata-0\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") " pod="openstack/nova-metadata-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.938274 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:21 crc kubenswrapper[4783]: I0930 13:57:21.959893 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 13:57:22 crc kubenswrapper[4783]: I0930 13:57:22.405640 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 13:57:22 crc kubenswrapper[4783]: I0930 13:57:22.454046 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 13:57:22 crc kubenswrapper[4783]: I0930 13:57:22.462239 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8","Type":"ContainerStarted","Data":"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc"} Sep 30 13:57:22 crc kubenswrapper[4783]: I0930 13:57:22.465301 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d61c8e26-064d-430a-8bb8-4e3c5e192d3a","Type":"ContainerStarted","Data":"ee552533f4539c436189d186f03bfe95d3ee6b29574a4538a5ff66ebdbee79c1"} Sep 30 13:57:22 crc kubenswrapper[4783]: I0930 13:57:22.854296 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f2f282a-74c3-4823-a2cb-7c002d18eedb" path="/var/lib/kubelet/pods/5f2f282a-74c3-4823-a2cb-7c002d18eedb/volumes" Sep 30 13:57:22 crc kubenswrapper[4783]: I0930 13:57:22.855299 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fec8c6a1-7435-4f3e-b77b-50fe20220c2a" path="/var/lib/kubelet/pods/fec8c6a1-7435-4f3e-b77b-50fe20220c2a/volumes" Sep 30 13:57:22 crc kubenswrapper[4783]: I0930 13:57:22.905641 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 13:57:22 crc kubenswrapper[4783]: I0930 13:57:22.905710 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 13:57:22 crc kubenswrapper[4783]: I0930 13:57:22.906499 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 13:57:22 crc kubenswrapper[4783]: I0930 13:57:22.906533 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 13:57:22 crc kubenswrapper[4783]: I0930 13:57:22.910844 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 13:57:22 crc kubenswrapper[4783]: I0930 13:57:22.910893 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.104324 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cc449b9dc-br2xm"] Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.107486 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.129349 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cc449b9dc-br2xm"] Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.190611 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-config\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.190721 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-dns-swift-storage-0\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.190777 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2m8z\" (UniqueName: \"kubernetes.io/projected/284aafcd-4081-400f-a1c3-9992b3557fc1-kube-api-access-f2m8z\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.190862 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-dns-svc\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.190930 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-ovsdbserver-nb\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.191012 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-ovsdbserver-sb\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.292673 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2m8z\" (UniqueName: \"kubernetes.io/projected/284aafcd-4081-400f-a1c3-9992b3557fc1-kube-api-access-f2m8z\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.292884 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-dns-svc\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.292946 4783 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-ovsdbserver-nb\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.293005 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-ovsdbserver-sb\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.293075 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-config\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.293135 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-dns-swift-storage-0\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.294007 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-dns-svc\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.294038 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-ovsdbserver-nb\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.294041 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-config\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.294150 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-dns-swift-storage-0\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.294397 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-ovsdbserver-sb\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.312582 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2m8z\" (UniqueName: 
\"kubernetes.io/projected/284aafcd-4081-400f-a1c3-9992b3557fc1-kube-api-access-f2m8z\") pod \"dnsmasq-dns-cc449b9dc-br2xm\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.441376 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.497460 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5f3f45e8-14eb-423b-8aab-668a84f21c9e","Type":"ContainerStarted","Data":"abd6bb488dc2668134762a127ca305ad648a5b4a3a6203b918747f3350db34ba"} Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.497506 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5f3f45e8-14eb-423b-8aab-668a84f21c9e","Type":"ContainerStarted","Data":"f72cd302f85dcf7dc37c0a07d719d5fa10b81a37de3b46e6cdb08482fba42040"} Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.497520 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5f3f45e8-14eb-423b-8aab-668a84f21c9e","Type":"ContainerStarted","Data":"686e8122e29dd65e065a678c80154ea6197f21ddec951880f157574ec3ef8cbe"} Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.531878 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d61c8e26-064d-430a-8bb8-4e3c5e192d3a","Type":"ContainerStarted","Data":"f58695968a9a08174e809fb09ab91645d04b3c0b2d605ec2f45eabb0375db90f"} Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.579699 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.579675568 podStartE2EDuration="2.579675568s" podCreationTimestamp="2025-09-30 13:57:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:57:23.532388617 +0000 UTC m=+1343.463854924" watchObservedRunningTime="2025-09-30 13:57:23.579675568 +0000 UTC m=+1343.511141895" Sep 30 13:57:23 crc kubenswrapper[4783]: I0930 13:57:23.596165 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.596142445 podStartE2EDuration="2.596142445s" podCreationTimestamp="2025-09-30 13:57:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:57:23.562181169 +0000 UTC m=+1343.493647476" watchObservedRunningTime="2025-09-30 13:57:23.596142445 +0000 UTC m=+1343.527608752" Sep 30 13:57:24 crc kubenswrapper[4783]: I0930 13:57:24.055436 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cc449b9dc-br2xm"] Sep 30 13:57:24 crc kubenswrapper[4783]: W0930 13:57:24.060838 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod284aafcd_4081_400f_a1c3_9992b3557fc1.slice/crio-75f305b6d3b27d3d91d0441124a5fa6bb9515c64a7c940a27c2a4f75ef66bbb8 WatchSource:0}: Error finding container 75f305b6d3b27d3d91d0441124a5fa6bb9515c64a7c940a27c2a4f75ef66bbb8: Status 404 returned error can't find the container with id 75f305b6d3b27d3d91d0441124a5fa6bb9515c64a7c940a27c2a4f75ef66bbb8 Sep 30 13:57:24 crc kubenswrapper[4783]: I0930 13:57:24.540312 4783 generic.go:334] "Generic (PLEG): container 
finished" podID="284aafcd-4081-400f-a1c3-9992b3557fc1" containerID="dd7d580824d1aa4e9f134f2e9bafeb6905e2e477dbcbfa9c2ccebb95f988046f" exitCode=0 Sep 30 13:57:24 crc kubenswrapper[4783]: I0930 13:57:24.540520 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" event={"ID":"284aafcd-4081-400f-a1c3-9992b3557fc1","Type":"ContainerDied","Data":"dd7d580824d1aa4e9f134f2e9bafeb6905e2e477dbcbfa9c2ccebb95f988046f"} Sep 30 13:57:24 crc kubenswrapper[4783]: I0930 13:57:24.540677 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" event={"ID":"284aafcd-4081-400f-a1c3-9992b3557fc1","Type":"ContainerStarted","Data":"75f305b6d3b27d3d91d0441124a5fa6bb9515c64a7c940a27c2a4f75ef66bbb8"} Sep 30 13:57:24 crc kubenswrapper[4783]: I0930 13:57:24.543919 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8","Type":"ContainerStarted","Data":"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743"} Sep 30 13:57:24 crc kubenswrapper[4783]: I0930 13:57:24.544201 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 13:57:24 crc kubenswrapper[4783]: I0930 13:57:24.598422 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.470632231 podStartE2EDuration="7.598403076s" podCreationTimestamp="2025-09-30 13:57:17 +0000 UTC" firstStartedPulling="2025-09-30 13:57:18.296498748 +0000 UTC m=+1338.227965065" lastFinishedPulling="2025-09-30 13:57:23.424269603 +0000 UTC m=+1343.355735910" observedRunningTime="2025-09-30 13:57:24.597670282 +0000 UTC m=+1344.529136589" watchObservedRunningTime="2025-09-30 13:57:24.598403076 +0000 UTC m=+1344.529869383" Sep 30 13:57:25 crc kubenswrapper[4783]: I0930 13:57:25.554812 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" event={"ID":"284aafcd-4081-400f-a1c3-9992b3557fc1","Type":"ContainerStarted","Data":"e102327404b7740d70116bbb62e2fff0bd91616eda2f2ce2560b87e112578b14"} Sep 30 13:57:25 crc kubenswrapper[4783]: I0930 13:57:25.555096 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:25 crc kubenswrapper[4783]: I0930 13:57:25.580344 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" podStartSLOduration=2.580326196 podStartE2EDuration="2.580326196s" podCreationTimestamp="2025-09-30 13:57:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:57:25.574043485 +0000 UTC m=+1345.505509792" watchObservedRunningTime="2025-09-30 13:57:25.580326196 +0000 UTC m=+1345.511792503" Sep 30 13:57:25 crc kubenswrapper[4783]: I0930 13:57:25.912445 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:57:25 crc kubenswrapper[4783]: I0930 13:57:25.912838 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6141ae2c-fd66-44ee-a430-33ba48bf8d27" containerName="nova-api-log" containerID="cri-o://5aecba9461943d65997cc0dccc81c1904ac9cf1065d1d7d3220f4ce43ea1fd2e" gracePeriod=30 Sep 30 13:57:25 crc kubenswrapper[4783]: I0930 13:57:25.912916 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" 
podUID="6141ae2c-fd66-44ee-a430-33ba48bf8d27" containerName="nova-api-api" containerID="cri-o://50ffda0651fa5193affd858ba492eaf2639a8a1a6a772a4d676d79d3f22377d0" gracePeriod=30 Sep 30 13:57:25 crc kubenswrapper[4783]: I0930 13:57:25.935480 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:57:26 crc kubenswrapper[4783]: I0930 13:57:26.565461 4783 generic.go:334] "Generic (PLEG): container finished" podID="6141ae2c-fd66-44ee-a430-33ba48bf8d27" containerID="5aecba9461943d65997cc0dccc81c1904ac9cf1065d1d7d3220f4ce43ea1fd2e" exitCode=143 Sep 30 13:57:26 crc kubenswrapper[4783]: I0930 13:57:26.565542 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6141ae2c-fd66-44ee-a430-33ba48bf8d27","Type":"ContainerDied","Data":"5aecba9461943d65997cc0dccc81c1904ac9cf1065d1d7d3220f4ce43ea1fd2e"} Sep 30 13:57:26 crc kubenswrapper[4783]: I0930 13:57:26.939152 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:26 crc kubenswrapper[4783]: I0930 13:57:26.960580 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 13:57:26 crc kubenswrapper[4783]: I0930 13:57:26.960705 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 13:57:27 crc kubenswrapper[4783]: I0930 13:57:27.573453 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="ceilometer-central-agent" containerID="cri-o://16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902" gracePeriod=30 Sep 30 13:57:27 crc kubenswrapper[4783]: I0930 13:57:27.573476 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="proxy-httpd" containerID="cri-o://9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743" gracePeriod=30 Sep 30 13:57:27 crc kubenswrapper[4783]: I0930 13:57:27.573559 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="sg-core" containerID="cri-o://384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc" gracePeriod=30 Sep 30 13:57:27 crc kubenswrapper[4783]: I0930 13:57:27.573582 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="ceilometer-notification-agent" containerID="cri-o://a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5" gracePeriod=30 Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.311315 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.408642 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-sg-core-conf-yaml\") pod \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.409821 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g8gx\" (UniqueName: \"kubernetes.io/projected/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-kube-api-access-6g8gx\") pod \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.409957 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-ceilometer-tls-certs\") pod \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.410002 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-run-httpd\") pod \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.410054 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-config-data\") pod \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.410083 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-scripts\") pod \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.410162 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-log-httpd\") pod \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.410211 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-combined-ca-bundle\") pod \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\" (UID: \"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8\") " Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.412640 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" (UID: "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.413492 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" (UID: "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.416374 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-kube-api-access-6g8gx" (OuterVolumeSpecName: "kube-api-access-6g8gx") pod "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" (UID: "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8"). InnerVolumeSpecName "kube-api-access-6g8gx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.439874 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-scripts" (OuterVolumeSpecName: "scripts") pod "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" (UID: "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.495737 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" (UID: "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.512910 4783 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.512951 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g8gx\" (UniqueName: \"kubernetes.io/projected/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-kube-api-access-6g8gx\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.512968 4783 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.512981 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.512996 4783 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.533968 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" (UID: "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.547362 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" (UID: "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.570768 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-config-data" (OuterVolumeSpecName: "config-data") pod "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" (UID: "2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.585946 4783 generic.go:334] "Generic (PLEG): container finished" podID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerID="9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743" exitCode=0 Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.585994 4783 generic.go:334] "Generic (PLEG): container finished" podID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerID="384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc" exitCode=2 Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.586003 4783 generic.go:334] "Generic (PLEG): container finished" podID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerID="a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5" exitCode=0 Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.586011 4783 generic.go:334] "Generic (PLEG): container finished" podID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerID="16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902" exitCode=0 Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.586046 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8","Type":"ContainerDied","Data":"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743"} Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.586090 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8","Type":"ContainerDied","Data":"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc"} Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.586100 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8","Type":"ContainerDied","Data":"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5"} Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.586111 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8","Type":"ContainerDied","Data":"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902"} Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.586124 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8","Type":"ContainerDied","Data":"c894568d974cc0b881707fbb8dfcff712d5aa1ef2a8732d8b750b99ba1829f4d"} Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.586143 4783 scope.go:117] "RemoveContainer" 
containerID="9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.586385 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.620719 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.620764 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.620778 4783 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.626353 4783 scope.go:117] "RemoveContainer" containerID="384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.632791 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.646577 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.655251 4783 scope.go:117] "RemoveContainer" containerID="a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.660709 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:57:28 crc kubenswrapper[4783]: E0930 13:57:28.661360 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="proxy-httpd" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.661382 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="proxy-httpd" Sep 30 13:57:28 crc kubenswrapper[4783]: E0930 13:57:28.661419 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="ceilometer-notification-agent" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.661429 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="ceilometer-notification-agent" Sep 30 13:57:28 crc kubenswrapper[4783]: E0930 13:57:28.661467 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="sg-core" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.661479 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="sg-core" Sep 30 13:57:28 crc kubenswrapper[4783]: E0930 13:57:28.661503 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="ceilometer-central-agent" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.661513 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="ceilometer-central-agent" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.661786 4783 
memory_manager.go:354] "RemoveStaleState removing state" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="ceilometer-notification-agent" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.661825 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="proxy-httpd" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.661847 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="sg-core" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.661867 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" containerName="ceilometer-central-agent" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.664209 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.668057 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.668086 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.669453 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.673020 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.710604 4783 scope.go:117] "RemoveContainer" containerID="16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.722772 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.722862 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01c79a8c-fb3e-4675-8f73-8e7916e746cc-run-httpd\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.722895 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.722927 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.723115 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkfzl\" (UniqueName: \"kubernetes.io/projected/01c79a8c-fb3e-4675-8f73-8e7916e746cc-kube-api-access-xkfzl\") 
pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.723171 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-config-data\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.723269 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-scripts\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.723311 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01c79a8c-fb3e-4675-8f73-8e7916e746cc-log-httpd\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.759393 4783 scope.go:117] "RemoveContainer" containerID="9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743" Sep 30 13:57:28 crc kubenswrapper[4783]: E0930 13:57:28.760037 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743\": container with ID starting with 9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743 not found: ID does not exist" containerID="9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.760119 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743"} err="failed to get container status \"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743\": rpc error: code = NotFound desc = could not find container \"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743\": container with ID starting with 9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743 not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.760197 4783 scope.go:117] "RemoveContainer" containerID="384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc" Sep 30 13:57:28 crc kubenswrapper[4783]: E0930 13:57:28.760755 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc\": container with ID starting with 384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc not found: ID does not exist" containerID="384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.760800 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc"} err="failed to get container status \"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc\": rpc error: code = NotFound desc = could not find container \"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc\": container with 
ID starting with 384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.760836 4783 scope.go:117] "RemoveContainer" containerID="a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5" Sep 30 13:57:28 crc kubenswrapper[4783]: E0930 13:57:28.761576 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5\": container with ID starting with a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5 not found: ID does not exist" containerID="a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.761624 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5"} err="failed to get container status \"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5\": rpc error: code = NotFound desc = could not find container \"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5\": container with ID starting with a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5 not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.761651 4783 scope.go:117] "RemoveContainer" containerID="16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902" Sep 30 13:57:28 crc kubenswrapper[4783]: E0930 13:57:28.762312 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902\": container with ID starting with 16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902 not found: ID does not exist" containerID="16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.762357 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902"} err="failed to get container status \"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902\": rpc error: code = NotFound desc = could not find container \"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902\": container with ID starting with 16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902 not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.762377 4783 scope.go:117] "RemoveContainer" containerID="9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.763435 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743"} err="failed to get container status \"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743\": rpc error: code = NotFound desc = could not find container \"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743\": container with ID starting with 9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743 not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.763460 4783 scope.go:117] "RemoveContainer" containerID="384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc" Sep 30 13:57:28 
crc kubenswrapper[4783]: I0930 13:57:28.764081 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc"} err="failed to get container status \"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc\": rpc error: code = NotFound desc = could not find container \"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc\": container with ID starting with 384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.764109 4783 scope.go:117] "RemoveContainer" containerID="a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.764444 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5"} err="failed to get container status \"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5\": rpc error: code = NotFound desc = could not find container \"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5\": container with ID starting with a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5 not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.764484 4783 scope.go:117] "RemoveContainer" containerID="16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.765535 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902"} err="failed to get container status \"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902\": rpc error: code = NotFound desc = could not find container \"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902\": container with ID starting with 16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902 not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.765589 4783 scope.go:117] "RemoveContainer" containerID="9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.765970 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743"} err="failed to get container status \"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743\": rpc error: code = NotFound desc = could not find container \"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743\": container with ID starting with 9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743 not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.766005 4783 scope.go:117] "RemoveContainer" containerID="384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.766453 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc"} err="failed to get container status \"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc\": rpc error: code = NotFound desc = could not find container \"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc\": container with 
ID starting with 384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.766507 4783 scope.go:117] "RemoveContainer" containerID="a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.767815 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5"} err="failed to get container status \"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5\": rpc error: code = NotFound desc = could not find container \"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5\": container with ID starting with a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5 not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.767874 4783 scope.go:117] "RemoveContainer" containerID="16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.768562 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902"} err="failed to get container status \"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902\": rpc error: code = NotFound desc = could not find container \"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902\": container with ID starting with 16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902 not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.768608 4783 scope.go:117] "RemoveContainer" containerID="9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.768948 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743"} err="failed to get container status \"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743\": rpc error: code = NotFound desc = could not find container \"9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743\": container with ID starting with 9727ff53dbe80bbae7325cbc268bcc07e9cf3adea8ea8243b6b48ae98c585743 not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.768969 4783 scope.go:117] "RemoveContainer" containerID="384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.769340 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc"} err="failed to get container status \"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc\": rpc error: code = NotFound desc = could not find container \"384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc\": container with ID starting with 384e695c79cdb9f0c9330eb1d38370dd7f4e3d8e9638975048545a368ce7a7cc not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.769368 4783 scope.go:117] "RemoveContainer" containerID="a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.769697 4783 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5"} err="failed to get container status \"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5\": rpc error: code = NotFound desc = could not find container \"a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5\": container with ID starting with a49cfdec7a6c7c6f685d0adbc2d4a1d1c8e909d99c192f0fed6754f989678bc5 not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.769742 4783 scope.go:117] "RemoveContainer" containerID="16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.770079 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902"} err="failed to get container status \"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902\": rpc error: code = NotFound desc = could not find container \"16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902\": container with ID starting with 16261120cb4573501c9c3101d806ea5dbd9386cbd98b7015ed67a067a119f902 not found: ID does not exist" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.824712 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01c79a8c-fb3e-4675-8f73-8e7916e746cc-run-httpd\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.824785 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.824823 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.825265 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01c79a8c-fb3e-4675-8f73-8e7916e746cc-run-httpd\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.825514 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkfzl\" (UniqueName: \"kubernetes.io/projected/01c79a8c-fb3e-4675-8f73-8e7916e746cc-kube-api-access-xkfzl\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.825556 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-config-data\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.825605 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-scripts\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.825635 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01c79a8c-fb3e-4675-8f73-8e7916e746cc-log-httpd\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.825683 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.826348 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01c79a8c-fb3e-4675-8f73-8e7916e746cc-log-httpd\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.828872 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.829169 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.829325 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-config-data\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.837931 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-scripts\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.840297 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.844720 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkfzl\" (UniqueName: \"kubernetes.io/projected/01c79a8c-fb3e-4675-8f73-8e7916e746cc-kube-api-access-xkfzl\") pod \"ceilometer-0\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " pod="openstack/ceilometer-0" Sep 30 13:57:28 crc kubenswrapper[4783]: I0930 13:57:28.855422 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8" 
path="/var/lib/kubelet/pods/2cc14b2e-7b0c-4fb6-a2ce-fea6449fadc8/volumes" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.045675 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.555139 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.568469 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:57:29 crc kubenswrapper[4783]: W0930 13:57:29.577151 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod01c79a8c_fb3e_4675_8f73_8e7916e746cc.slice/crio-208d11a19d6ff72d9456dde76c744b6abb4da2bff39922e8b3720512d28531ab WatchSource:0}: Error finding container 208d11a19d6ff72d9456dde76c744b6abb4da2bff39922e8b3720512d28531ab: Status 404 returned error can't find the container with id 208d11a19d6ff72d9456dde76c744b6abb4da2bff39922e8b3720512d28531ab Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.609296 4783 generic.go:334] "Generic (PLEG): container finished" podID="6141ae2c-fd66-44ee-a430-33ba48bf8d27" containerID="50ffda0651fa5193affd858ba492eaf2639a8a1a6a772a4d676d79d3f22377d0" exitCode=0 Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.609381 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6141ae2c-fd66-44ee-a430-33ba48bf8d27","Type":"ContainerDied","Data":"50ffda0651fa5193affd858ba492eaf2639a8a1a6a772a4d676d79d3f22377d0"} Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.609426 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6141ae2c-fd66-44ee-a430-33ba48bf8d27","Type":"ContainerDied","Data":"53a50e93de3dc265e819d5b1fc7b5868a125a62a05c06d9b30e564fbcebb60a4"} Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.609442 4783 scope.go:117] "RemoveContainer" containerID="50ffda0651fa5193affd858ba492eaf2639a8a1a6a772a4d676d79d3f22377d0" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.609581 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.617092 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01c79a8c-fb3e-4675-8f73-8e7916e746cc","Type":"ContainerStarted","Data":"208d11a19d6ff72d9456dde76c744b6abb4da2bff39922e8b3720512d28531ab"} Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.642358 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6141ae2c-fd66-44ee-a430-33ba48bf8d27-combined-ca-bundle\") pod \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.642597 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzxch\" (UniqueName: \"kubernetes.io/projected/6141ae2c-fd66-44ee-a430-33ba48bf8d27-kube-api-access-kzxch\") pod \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.642671 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6141ae2c-fd66-44ee-a430-33ba48bf8d27-logs\") pod \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.642795 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6141ae2c-fd66-44ee-a430-33ba48bf8d27-config-data\") pod \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\" (UID: \"6141ae2c-fd66-44ee-a430-33ba48bf8d27\") " Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.651504 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6141ae2c-fd66-44ee-a430-33ba48bf8d27-kube-api-access-kzxch" (OuterVolumeSpecName: "kube-api-access-kzxch") pod "6141ae2c-fd66-44ee-a430-33ba48bf8d27" (UID: "6141ae2c-fd66-44ee-a430-33ba48bf8d27"). InnerVolumeSpecName "kube-api-access-kzxch". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.655340 4783 scope.go:117] "RemoveContainer" containerID="5aecba9461943d65997cc0dccc81c1904ac9cf1065d1d7d3220f4ce43ea1fd2e" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.655459 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6141ae2c-fd66-44ee-a430-33ba48bf8d27-logs" (OuterVolumeSpecName: "logs") pod "6141ae2c-fd66-44ee-a430-33ba48bf8d27" (UID: "6141ae2c-fd66-44ee-a430-33ba48bf8d27"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.681288 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6141ae2c-fd66-44ee-a430-33ba48bf8d27-config-data" (OuterVolumeSpecName: "config-data") pod "6141ae2c-fd66-44ee-a430-33ba48bf8d27" (UID: "6141ae2c-fd66-44ee-a430-33ba48bf8d27"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.693477 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6141ae2c-fd66-44ee-a430-33ba48bf8d27-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6141ae2c-fd66-44ee-a430-33ba48bf8d27" (UID: "6141ae2c-fd66-44ee-a430-33ba48bf8d27"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.698486 4783 scope.go:117] "RemoveContainer" containerID="50ffda0651fa5193affd858ba492eaf2639a8a1a6a772a4d676d79d3f22377d0" Sep 30 13:57:29 crc kubenswrapper[4783]: E0930 13:57:29.699131 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50ffda0651fa5193affd858ba492eaf2639a8a1a6a772a4d676d79d3f22377d0\": container with ID starting with 50ffda0651fa5193affd858ba492eaf2639a8a1a6a772a4d676d79d3f22377d0 not found: ID does not exist" containerID="50ffda0651fa5193affd858ba492eaf2639a8a1a6a772a4d676d79d3f22377d0" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.699163 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50ffda0651fa5193affd858ba492eaf2639a8a1a6a772a4d676d79d3f22377d0"} err="failed to get container status \"50ffda0651fa5193affd858ba492eaf2639a8a1a6a772a4d676d79d3f22377d0\": rpc error: code = NotFound desc = could not find container \"50ffda0651fa5193affd858ba492eaf2639a8a1a6a772a4d676d79d3f22377d0\": container with ID starting with 50ffda0651fa5193affd858ba492eaf2639a8a1a6a772a4d676d79d3f22377d0 not found: ID does not exist" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.699184 4783 scope.go:117] "RemoveContainer" containerID="5aecba9461943d65997cc0dccc81c1904ac9cf1065d1d7d3220f4ce43ea1fd2e" Sep 30 13:57:29 crc kubenswrapper[4783]: E0930 13:57:29.699674 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5aecba9461943d65997cc0dccc81c1904ac9cf1065d1d7d3220f4ce43ea1fd2e\": container with ID starting with 5aecba9461943d65997cc0dccc81c1904ac9cf1065d1d7d3220f4ce43ea1fd2e not found: ID does not exist" containerID="5aecba9461943d65997cc0dccc81c1904ac9cf1065d1d7d3220f4ce43ea1fd2e" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.700886 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5aecba9461943d65997cc0dccc81c1904ac9cf1065d1d7d3220f4ce43ea1fd2e"} err="failed to get container status \"5aecba9461943d65997cc0dccc81c1904ac9cf1065d1d7d3220f4ce43ea1fd2e\": rpc error: code = NotFound desc = could not find container \"5aecba9461943d65997cc0dccc81c1904ac9cf1065d1d7d3220f4ce43ea1fd2e\": container with ID starting with 5aecba9461943d65997cc0dccc81c1904ac9cf1065d1d7d3220f4ce43ea1fd2e not found: ID does not exist" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.747036 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6141ae2c-fd66-44ee-a430-33ba48bf8d27-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.747348 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6141ae2c-fd66-44ee-a430-33ba48bf8d27-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.747363 4783 
reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6141ae2c-fd66-44ee-a430-33ba48bf8d27-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.747373 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzxch\" (UniqueName: \"kubernetes.io/projected/6141ae2c-fd66-44ee-a430-33ba48bf8d27-kube-api-access-kzxch\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.944111 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.953507 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.974517 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 13:57:29 crc kubenswrapper[4783]: E0930 13:57:29.975180 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6141ae2c-fd66-44ee-a430-33ba48bf8d27" containerName="nova-api-log" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.975203 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6141ae2c-fd66-44ee-a430-33ba48bf8d27" containerName="nova-api-log" Sep 30 13:57:29 crc kubenswrapper[4783]: E0930 13:57:29.975280 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6141ae2c-fd66-44ee-a430-33ba48bf8d27" containerName="nova-api-api" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.975290 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6141ae2c-fd66-44ee-a430-33ba48bf8d27" containerName="nova-api-api" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.975508 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6141ae2c-fd66-44ee-a430-33ba48bf8d27" containerName="nova-api-log" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.975527 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6141ae2c-fd66-44ee-a430-33ba48bf8d27" containerName="nova-api-api" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.976816 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.979553 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.979610 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.979789 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 13:57:29 crc kubenswrapper[4783]: I0930 13:57:29.993672 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.057253 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/042a6b62-e1ae-4b13-8dac-46cf8e244e63-logs\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.057315 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-public-tls-certs\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.057371 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.057398 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx9rw\" (UniqueName: \"kubernetes.io/projected/042a6b62-e1ae-4b13-8dac-46cf8e244e63-kube-api-access-mx9rw\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.057460 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-internal-tls-certs\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.057485 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-config-data\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.159600 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-internal-tls-certs\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.159644 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-config-data\") pod \"nova-api-0\" (UID: 
\"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.159747 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/042a6b62-e1ae-4b13-8dac-46cf8e244e63-logs\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.159772 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-public-tls-certs\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.159811 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.159829 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx9rw\" (UniqueName: \"kubernetes.io/projected/042a6b62-e1ae-4b13-8dac-46cf8e244e63-kube-api-access-mx9rw\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.160302 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/042a6b62-e1ae-4b13-8dac-46cf8e244e63-logs\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.166302 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.166775 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-internal-tls-certs\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.179713 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-public-tls-certs\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.180746 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mx9rw\" (UniqueName: \"kubernetes.io/projected/042a6b62-e1ae-4b13-8dac-46cf8e244e63-kube-api-access-mx9rw\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.181323 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-config-data\") pod \"nova-api-0\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") " pod="openstack/nova-api-0" Sep 
30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.304010 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.629371 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01c79a8c-fb3e-4675-8f73-8e7916e746cc","Type":"ContainerStarted","Data":"cecbe25798bb861b6def416ee9dd34eed22000629d5be8de71c4a766d16e7b32"} Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.826061 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:57:30 crc kubenswrapper[4783]: I0930 13:57:30.859112 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6141ae2c-fd66-44ee-a430-33ba48bf8d27" path="/var/lib/kubelet/pods/6141ae2c-fd66-44ee-a430-33ba48bf8d27/volumes" Sep 30 13:57:31 crc kubenswrapper[4783]: I0930 13:57:31.644748 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01c79a8c-fb3e-4675-8f73-8e7916e746cc","Type":"ContainerStarted","Data":"2a75fa3505609d0bdb1e679052bbfa1b815931211ba773373b4cf8cf6a6f8ced"} Sep 30 13:57:31 crc kubenswrapper[4783]: I0930 13:57:31.646420 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"042a6b62-e1ae-4b13-8dac-46cf8e244e63","Type":"ContainerStarted","Data":"c0370e9e7d864c2c9f1bb02da815cdefb288c220f43bfd64692abc5efb6adec5"} Sep 30 13:57:31 crc kubenswrapper[4783]: I0930 13:57:31.646458 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"042a6b62-e1ae-4b13-8dac-46cf8e244e63","Type":"ContainerStarted","Data":"ab6864ab16c498e4305ddbe1aa4bbbb37acd994f33c75be907ff187ab43dcdff"} Sep 30 13:57:31 crc kubenswrapper[4783]: I0930 13:57:31.646467 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"042a6b62-e1ae-4b13-8dac-46cf8e244e63","Type":"ContainerStarted","Data":"75416e10ef23bd39c1504a17e0c88380de42dad5cd46e707781f658bd1c680e0"} Sep 30 13:57:31 crc kubenswrapper[4783]: I0930 13:57:31.668762 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.668744553 podStartE2EDuration="2.668744553s" podCreationTimestamp="2025-09-30 13:57:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:57:31.66333203 +0000 UTC m=+1351.594798327" watchObservedRunningTime="2025-09-30 13:57:31.668744553 +0000 UTC m=+1351.600210860" Sep 30 13:57:31 crc kubenswrapper[4783]: I0930 13:57:31.939066 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:31 crc kubenswrapper[4783]: I0930 13:57:31.960848 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 13:57:31 crc kubenswrapper[4783]: I0930 13:57:31.960914 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 13:57:31 crc kubenswrapper[4783]: I0930 13:57:31.960975 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:32.658817 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"01c79a8c-fb3e-4675-8f73-8e7916e746cc","Type":"ContainerStarted","Data":"2b3fa9c5a1e21601eec7e4e1e10d5c915c51e4786e66caa45491f931fcfc9654"} Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:32.676057 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:32.875391 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-5hxlv"] Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:32.876475 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:32.880271 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-5hxlv"] Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:32.882001 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:32.883830 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:32.975404 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:32.975696 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.016436 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-config-data\") pod \"nova-cell1-cell-mapping-5hxlv\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.016491 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-scripts\") pod \"nova-cell1-cell-mapping-5hxlv\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.016785 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-5hxlv\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.017173 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgq4g\" (UniqueName: \"kubernetes.io/projected/d4af7ed9-fafa-46b2-87df-f482bed30a5a-kube-api-access-bgq4g\") pod \"nova-cell1-cell-mapping-5hxlv\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " 
pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.118828 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgq4g\" (UniqueName: \"kubernetes.io/projected/d4af7ed9-fafa-46b2-87df-f482bed30a5a-kube-api-access-bgq4g\") pod \"nova-cell1-cell-mapping-5hxlv\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.118940 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-config-data\") pod \"nova-cell1-cell-mapping-5hxlv\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.118973 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-scripts\") pod \"nova-cell1-cell-mapping-5hxlv\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.119044 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-5hxlv\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.124953 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-scripts\") pod \"nova-cell1-cell-mapping-5hxlv\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.125722 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-config-data\") pod \"nova-cell1-cell-mapping-5hxlv\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.127992 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-5hxlv\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.139195 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgq4g\" (UniqueName: \"kubernetes.io/projected/d4af7ed9-fafa-46b2-87df-f482bed30a5a-kube-api-access-bgq4g\") pod \"nova-cell1-cell-mapping-5hxlv\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.205798 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.443394 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.564055 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d9cc4c77f-bb8wx"] Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.564716 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" podUID="2c3128f8-342d-46a4-a539-bfd0942a8b86" containerName="dnsmasq-dns" containerID="cri-o://da3edee2cd4b177e4ee848d5ef8868fee8f378a21bb73341b9b74c4cfe70ea05" gracePeriod=10 Sep 30 13:57:33 crc kubenswrapper[4783]: W0930 13:57:33.570894 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4af7ed9_fafa_46b2_87df_f482bed30a5a.slice/crio-5af6e285200a1186eabe6578b394f1d81a2aa12d61a86416f22a5fcd92971372 WatchSource:0}: Error finding container 5af6e285200a1186eabe6578b394f1d81a2aa12d61a86416f22a5fcd92971372: Status 404 returned error can't find the container with id 5af6e285200a1186eabe6578b394f1d81a2aa12d61a86416f22a5fcd92971372 Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.604588 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-5hxlv"] Sep 30 13:57:33 crc kubenswrapper[4783]: I0930 13:57:33.706838 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-5hxlv" event={"ID":"d4af7ed9-fafa-46b2-87df-f482bed30a5a","Type":"ContainerStarted","Data":"5af6e285200a1186eabe6578b394f1d81a2aa12d61a86416f22a5fcd92971372"} Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.246788 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.259692 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g229n\" (UniqueName: \"kubernetes.io/projected/2c3128f8-342d-46a4-a539-bfd0942a8b86-kube-api-access-g229n\") pod \"2c3128f8-342d-46a4-a539-bfd0942a8b86\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.259771 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-ovsdbserver-nb\") pod \"2c3128f8-342d-46a4-a539-bfd0942a8b86\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.259830 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-config\") pod \"2c3128f8-342d-46a4-a539-bfd0942a8b86\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.259854 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-dns-swift-storage-0\") pod \"2c3128f8-342d-46a4-a539-bfd0942a8b86\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.259927 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-ovsdbserver-sb\") pod \"2c3128f8-342d-46a4-a539-bfd0942a8b86\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.259955 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-dns-svc\") pod \"2c3128f8-342d-46a4-a539-bfd0942a8b86\" (UID: \"2c3128f8-342d-46a4-a539-bfd0942a8b86\") " Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.286980 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c3128f8-342d-46a4-a539-bfd0942a8b86-kube-api-access-g229n" (OuterVolumeSpecName: "kube-api-access-g229n") pod "2c3128f8-342d-46a4-a539-bfd0942a8b86" (UID: "2c3128f8-342d-46a4-a539-bfd0942a8b86"). InnerVolumeSpecName "kube-api-access-g229n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.363656 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g229n\" (UniqueName: \"kubernetes.io/projected/2c3128f8-342d-46a4-a539-bfd0942a8b86-kube-api-access-g229n\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.383184 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2c3128f8-342d-46a4-a539-bfd0942a8b86" (UID: "2c3128f8-342d-46a4-a539-bfd0942a8b86"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.413385 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-config" (OuterVolumeSpecName: "config") pod "2c3128f8-342d-46a4-a539-bfd0942a8b86" (UID: "2c3128f8-342d-46a4-a539-bfd0942a8b86"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.414085 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2c3128f8-342d-46a4-a539-bfd0942a8b86" (UID: "2c3128f8-342d-46a4-a539-bfd0942a8b86"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.420032 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2c3128f8-342d-46a4-a539-bfd0942a8b86" (UID: "2c3128f8-342d-46a4-a539-bfd0942a8b86"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.426448 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2c3128f8-342d-46a4-a539-bfd0942a8b86" (UID: "2c3128f8-342d-46a4-a539-bfd0942a8b86"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.465593 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.465628 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.465638 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.465646 4783 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.465658 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c3128f8-342d-46a4-a539-bfd0942a8b86-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.716970 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-5hxlv" event={"ID":"d4af7ed9-fafa-46b2-87df-f482bed30a5a","Type":"ContainerStarted","Data":"9b70fe082872f9b22769a85eabc84223b0211c0b3c90647cad99c0c149caec1e"} Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.718653 4783 generic.go:334] "Generic (PLEG): container finished" 
podID="2c3128f8-342d-46a4-a539-bfd0942a8b86" containerID="da3edee2cd4b177e4ee848d5ef8868fee8f378a21bb73341b9b74c4cfe70ea05" exitCode=0 Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.718689 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.718722 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" event={"ID":"2c3128f8-342d-46a4-a539-bfd0942a8b86","Type":"ContainerDied","Data":"da3edee2cd4b177e4ee848d5ef8868fee8f378a21bb73341b9b74c4cfe70ea05"} Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.718745 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d9cc4c77f-bb8wx" event={"ID":"2c3128f8-342d-46a4-a539-bfd0942a8b86","Type":"ContainerDied","Data":"1ff90852cf36ac9f817b1875b7b4422275dd5d46ea31850605d1b3e74b3d4d48"} Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.718762 4783 scope.go:117] "RemoveContainer" containerID="da3edee2cd4b177e4ee848d5ef8868fee8f378a21bb73341b9b74c4cfe70ea05" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.721428 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01c79a8c-fb3e-4675-8f73-8e7916e746cc","Type":"ContainerStarted","Data":"28abb173cd857cbb9d4a1c5a005edf6a644ce45b75ff79242a7e087b0bfe444f"} Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.721972 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.769655 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-5hxlv" podStartSLOduration=2.769637732 podStartE2EDuration="2.769637732s" podCreationTimestamp="2025-09-30 13:57:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:57:34.746620628 +0000 UTC m=+1354.678086935" watchObservedRunningTime="2025-09-30 13:57:34.769637732 +0000 UTC m=+1354.701104039" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.770300 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d9cc4c77f-bb8wx"] Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.782101 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7d9cc4c77f-bb8wx"] Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.793783 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.412310382 podStartE2EDuration="6.793764583s" podCreationTimestamp="2025-09-30 13:57:28 +0000 UTC" firstStartedPulling="2025-09-30 13:57:29.581159468 +0000 UTC m=+1349.512625785" lastFinishedPulling="2025-09-30 13:57:33.962613679 +0000 UTC m=+1353.894079986" observedRunningTime="2025-09-30 13:57:34.77802238 +0000 UTC m=+1354.709488707" watchObservedRunningTime="2025-09-30 13:57:34.793764583 +0000 UTC m=+1354.725230900" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.811559 4783 scope.go:117] "RemoveContainer" containerID="1aaec22744a875049f09758bfe3bda799b1561924ec8abcf01651a6c00b6ef61" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.854455 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c3128f8-342d-46a4-a539-bfd0942a8b86" path="/var/lib/kubelet/pods/2c3128f8-342d-46a4-a539-bfd0942a8b86/volumes" Sep 30 13:57:34 crc 
kubenswrapper[4783]: I0930 13:57:34.857091 4783 scope.go:117] "RemoveContainer" containerID="da3edee2cd4b177e4ee848d5ef8868fee8f378a21bb73341b9b74c4cfe70ea05" Sep 30 13:57:34 crc kubenswrapper[4783]: E0930 13:57:34.857454 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da3edee2cd4b177e4ee848d5ef8868fee8f378a21bb73341b9b74c4cfe70ea05\": container with ID starting with da3edee2cd4b177e4ee848d5ef8868fee8f378a21bb73341b9b74c4cfe70ea05 not found: ID does not exist" containerID="da3edee2cd4b177e4ee848d5ef8868fee8f378a21bb73341b9b74c4cfe70ea05" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.857484 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da3edee2cd4b177e4ee848d5ef8868fee8f378a21bb73341b9b74c4cfe70ea05"} err="failed to get container status \"da3edee2cd4b177e4ee848d5ef8868fee8f378a21bb73341b9b74c4cfe70ea05\": rpc error: code = NotFound desc = could not find container \"da3edee2cd4b177e4ee848d5ef8868fee8f378a21bb73341b9b74c4cfe70ea05\": container with ID starting with da3edee2cd4b177e4ee848d5ef8868fee8f378a21bb73341b9b74c4cfe70ea05 not found: ID does not exist" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.857506 4783 scope.go:117] "RemoveContainer" containerID="1aaec22744a875049f09758bfe3bda799b1561924ec8abcf01651a6c00b6ef61" Sep 30 13:57:34 crc kubenswrapper[4783]: E0930 13:57:34.857763 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1aaec22744a875049f09758bfe3bda799b1561924ec8abcf01651a6c00b6ef61\": container with ID starting with 1aaec22744a875049f09758bfe3bda799b1561924ec8abcf01651a6c00b6ef61 not found: ID does not exist" containerID="1aaec22744a875049f09758bfe3bda799b1561924ec8abcf01651a6c00b6ef61" Sep 30 13:57:34 crc kubenswrapper[4783]: I0930 13:57:34.857798 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1aaec22744a875049f09758bfe3bda799b1561924ec8abcf01651a6c00b6ef61"} err="failed to get container status \"1aaec22744a875049f09758bfe3bda799b1561924ec8abcf01651a6c00b6ef61\": rpc error: code = NotFound desc = could not find container \"1aaec22744a875049f09758bfe3bda799b1561924ec8abcf01651a6c00b6ef61\": container with ID starting with 1aaec22744a875049f09758bfe3bda799b1561924ec8abcf01651a6c00b6ef61 not found: ID does not exist" Sep 30 13:57:37 crc kubenswrapper[4783]: I0930 13:57:37.674089 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:57:37 crc kubenswrapper[4783]: I0930 13:57:37.674607 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:57:39 crc kubenswrapper[4783]: I0930 13:57:39.797521 4783 generic.go:334] "Generic (PLEG): container finished" podID="d4af7ed9-fafa-46b2-87df-f482bed30a5a" containerID="9b70fe082872f9b22769a85eabc84223b0211c0b3c90647cad99c0c149caec1e" exitCode=0 Sep 30 13:57:39 crc kubenswrapper[4783]: I0930 13:57:39.797987 4783 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-5hxlv" event={"ID":"d4af7ed9-fafa-46b2-87df-f482bed30a5a","Type":"ContainerDied","Data":"9b70fe082872f9b22769a85eabc84223b0211c0b3c90647cad99c0c149caec1e"} Sep 30 13:57:40 crc kubenswrapper[4783]: I0930 13:57:40.305460 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 13:57:40 crc kubenswrapper[4783]: I0930 13:57:40.305520 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.208011 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.323677 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="042a6b62-e1ae-4b13-8dac-46cf8e244e63" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.323676 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="042a6b62-e1ae-4b13-8dac-46cf8e244e63" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.406982 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-config-data\") pod \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.407859 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-combined-ca-bundle\") pod \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.408007 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-scripts\") pod \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.408134 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgq4g\" (UniqueName: \"kubernetes.io/projected/d4af7ed9-fafa-46b2-87df-f482bed30a5a-kube-api-access-bgq4g\") pod \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\" (UID: \"d4af7ed9-fafa-46b2-87df-f482bed30a5a\") " Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.413811 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4af7ed9-fafa-46b2-87df-f482bed30a5a-kube-api-access-bgq4g" (OuterVolumeSpecName: "kube-api-access-bgq4g") pod "d4af7ed9-fafa-46b2-87df-f482bed30a5a" (UID: "d4af7ed9-fafa-46b2-87df-f482bed30a5a"). InnerVolumeSpecName "kube-api-access-bgq4g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.414376 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-scripts" (OuterVolumeSpecName: "scripts") pod "d4af7ed9-fafa-46b2-87df-f482bed30a5a" (UID: "d4af7ed9-fafa-46b2-87df-f482bed30a5a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.442880 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-config-data" (OuterVolumeSpecName: "config-data") pod "d4af7ed9-fafa-46b2-87df-f482bed30a5a" (UID: "d4af7ed9-fafa-46b2-87df-f482bed30a5a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.474480 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4af7ed9-fafa-46b2-87df-f482bed30a5a" (UID: "d4af7ed9-fafa-46b2-87df-f482bed30a5a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.510728 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.510782 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.510811 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4af7ed9-fafa-46b2-87df-f482bed30a5a-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.510836 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgq4g\" (UniqueName: \"kubernetes.io/projected/d4af7ed9-fafa-46b2-87df-f482bed30a5a-kube-api-access-bgq4g\") on node \"crc\" DevicePath \"\"" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.824271 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-5hxlv" event={"ID":"d4af7ed9-fafa-46b2-87df-f482bed30a5a","Type":"ContainerDied","Data":"5af6e285200a1186eabe6578b394f1d81a2aa12d61a86416f22a5fcd92971372"} Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.824312 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5af6e285200a1186eabe6578b394f1d81a2aa12d61a86416f22a5fcd92971372" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.824382 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-5hxlv" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.970961 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.973337 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 13:57:41 crc kubenswrapper[4783]: I0930 13:57:41.975874 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 13:57:42 crc kubenswrapper[4783]: I0930 13:57:42.015285 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:57:42 crc kubenswrapper[4783]: I0930 13:57:42.015820 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="042a6b62-e1ae-4b13-8dac-46cf8e244e63" containerName="nova-api-log" containerID="cri-o://ab6864ab16c498e4305ddbe1aa4bbbb37acd994f33c75be907ff187ab43dcdff" gracePeriod=30 Sep 30 13:57:42 crc kubenswrapper[4783]: I0930 13:57:42.015833 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="042a6b62-e1ae-4b13-8dac-46cf8e244e63" containerName="nova-api-api" containerID="cri-o://c0370e9e7d864c2c9f1bb02da815cdefb288c220f43bfd64692abc5efb6adec5" gracePeriod=30 Sep 30 13:57:42 crc kubenswrapper[4783]: I0930 13:57:42.042426 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 13:57:42 crc kubenswrapper[4783]: I0930 13:57:42.042818 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="5a4f929b-b804-451c-9dd4-c9e2420b2c0b" containerName="nova-scheduler-scheduler" containerID="cri-o://18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628" gracePeriod=30 Sep 30 13:57:42 crc kubenswrapper[4783]: I0930 13:57:42.052023 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 13:57:42 crc kubenswrapper[4783]: I0930 13:57:42.834767 4783 generic.go:334] "Generic (PLEG): container finished" podID="042a6b62-e1ae-4b13-8dac-46cf8e244e63" containerID="ab6864ab16c498e4305ddbe1aa4bbbb37acd994f33c75be907ff187ab43dcdff" exitCode=143 Sep 30 13:57:42 crc kubenswrapper[4783]: I0930 13:57:42.834842 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"042a6b62-e1ae-4b13-8dac-46cf8e244e63","Type":"ContainerDied","Data":"ab6864ab16c498e4305ddbe1aa4bbbb37acd994f33c75be907ff187ab43dcdff"} Sep 30 13:57:42 crc kubenswrapper[4783]: I0930 13:57:42.854443 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.009399 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-n642d"] Sep 30 13:57:43 crc kubenswrapper[4783]: E0930 13:57:43.009884 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4af7ed9-fafa-46b2-87df-f482bed30a5a" containerName="nova-manage" Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.009907 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4af7ed9-fafa-46b2-87df-f482bed30a5a" containerName="nova-manage" Sep 30 13:57:43 crc kubenswrapper[4783]: E0930 13:57:43.009919 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c3128f8-342d-46a4-a539-bfd0942a8b86" containerName="dnsmasq-dns" Sep 
30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.009926 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c3128f8-342d-46a4-a539-bfd0942a8b86" containerName="dnsmasq-dns"
Sep 30 13:57:43 crc kubenswrapper[4783]: E0930 13:57:43.009969 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c3128f8-342d-46a4-a539-bfd0942a8b86" containerName="init"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.009978 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c3128f8-342d-46a4-a539-bfd0942a8b86" containerName="init"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.010192 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c3128f8-342d-46a4-a539-bfd0942a8b86" containerName="dnsmasq-dns"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.010245 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4af7ed9-fafa-46b2-87df-f482bed30a5a" containerName="nova-manage"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.011887 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.020525 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n642d"]
Sep 30 13:57:43 crc kubenswrapper[4783]: E0930 13:57:43.025797 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Sep 30 13:57:43 crc kubenswrapper[4783]: E0930 13:57:43.032117 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.033140 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45wck\" (UniqueName: \"kubernetes.io/projected/4d7475fe-0651-421e-b8f2-a89f24f32779-kube-api-access-45wck\") pod \"redhat-operators-n642d\" (UID: \"4d7475fe-0651-421e-b8f2-a89f24f32779\") " pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.033196 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d7475fe-0651-421e-b8f2-a89f24f32779-catalog-content\") pod \"redhat-operators-n642d\" (UID: \"4d7475fe-0651-421e-b8f2-a89f24f32779\") " pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.033334 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d7475fe-0651-421e-b8f2-a89f24f32779-utilities\") pod \"redhat-operators-n642d\" (UID: \"4d7475fe-0651-421e-b8f2-a89f24f32779\") " pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:57:43 crc kubenswrapper[4783]: E0930 13:57:43.039600 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Sep 30 13:57:43 crc kubenswrapper[4783]: E0930 13:57:43.039753 4783 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="5a4f929b-b804-451c-9dd4-c9e2420b2c0b" containerName="nova-scheduler-scheduler"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.135268 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d7475fe-0651-421e-b8f2-a89f24f32779-utilities\") pod \"redhat-operators-n642d\" (UID: \"4d7475fe-0651-421e-b8f2-a89f24f32779\") " pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.135397 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45wck\" (UniqueName: \"kubernetes.io/projected/4d7475fe-0651-421e-b8f2-a89f24f32779-kube-api-access-45wck\") pod \"redhat-operators-n642d\" (UID: \"4d7475fe-0651-421e-b8f2-a89f24f32779\") " pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.135455 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d7475fe-0651-421e-b8f2-a89f24f32779-catalog-content\") pod \"redhat-operators-n642d\" (UID: \"4d7475fe-0651-421e-b8f2-a89f24f32779\") " pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.135899 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d7475fe-0651-421e-b8f2-a89f24f32779-utilities\") pod \"redhat-operators-n642d\" (UID: \"4d7475fe-0651-421e-b8f2-a89f24f32779\") " pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.135931 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d7475fe-0651-421e-b8f2-a89f24f32779-catalog-content\") pod \"redhat-operators-n642d\" (UID: \"4d7475fe-0651-421e-b8f2-a89f24f32779\") " pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.152797 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45wck\" (UniqueName: \"kubernetes.io/projected/4d7475fe-0651-421e-b8f2-a89f24f32779-kube-api-access-45wck\") pod \"redhat-operators-n642d\" (UID: \"4d7475fe-0651-421e-b8f2-a89f24f32779\") " pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.341913 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.806853 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n642d"]
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.847930 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n642d" event={"ID":"4d7475fe-0651-421e-b8f2-a89f24f32779","Type":"ContainerStarted","Data":"f4349601b7aee08b2675ae4f11dfba3304c808077bef3f4675ff2267e1adc444"}
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.848164 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" containerName="nova-metadata-log" containerID="cri-o://f72cd302f85dcf7dc37c0a07d719d5fa10b81a37de3b46e6cdb08482fba42040" gracePeriod=30
Sep 30 13:57:43 crc kubenswrapper[4783]: I0930 13:57:43.848253 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" containerName="nova-metadata-metadata" containerID="cri-o://abd6bb488dc2668134762a127ca305ad648a5b4a3a6203b918747f3350db34ba" gracePeriod=30
Sep 30 13:57:44 crc kubenswrapper[4783]: I0930 13:57:44.859176 4783 generic.go:334] "Generic (PLEG): container finished" podID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" containerID="f72cd302f85dcf7dc37c0a07d719d5fa10b81a37de3b46e6cdb08482fba42040" exitCode=143
Sep 30 13:57:44 crc kubenswrapper[4783]: I0930 13:57:44.859400 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5f3f45e8-14eb-423b-8aab-668a84f21c9e","Type":"ContainerDied","Data":"f72cd302f85dcf7dc37c0a07d719d5fa10b81a37de3b46e6cdb08482fba42040"}
Sep 30 13:57:44 crc kubenswrapper[4783]: I0930 13:57:44.861948 4783 generic.go:334] "Generic (PLEG): container finished" podID="4d7475fe-0651-421e-b8f2-a89f24f32779" containerID="0c4bbf1339d58b2c3f3092b53fd4dfca6ca31c3fa295137e4424d23331134b48" exitCode=0
Sep 30 13:57:44 crc kubenswrapper[4783]: I0930 13:57:44.861981 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n642d" event={"ID":"4d7475fe-0651-421e-b8f2-a89f24f32779","Type":"ContainerDied","Data":"0c4bbf1339d58b2c3f3092b53fd4dfca6ca31c3fa295137e4424d23331134b48"}
Sep 30 13:57:44 crc kubenswrapper[4783]: I0930 13:57:44.872111 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 13:57:46 crc kubenswrapper[4783]: I0930 13:57:46.884524 4783 generic.go:334] "Generic (PLEG): container finished" podID="042a6b62-e1ae-4b13-8dac-46cf8e244e63" containerID="c0370e9e7d864c2c9f1bb02da815cdefb288c220f43bfd64692abc5efb6adec5" exitCode=0
Sep 30 13:57:46 crc kubenswrapper[4783]: I0930 13:57:46.884596 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"042a6b62-e1ae-4b13-8dac-46cf8e244e63","Type":"ContainerDied","Data":"c0370e9e7d864c2c9f1bb02da815cdefb288c220f43bfd64692abc5efb6adec5"}
Sep 30 13:57:46 crc kubenswrapper[4783]: I0930 13:57:46.884959 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"042a6b62-e1ae-4b13-8dac-46cf8e244e63","Type":"ContainerDied","Data":"75416e10ef23bd39c1504a17e0c88380de42dad5cd46e707781f658bd1c680e0"}
Sep 30 13:57:46 crc kubenswrapper[4783]: I0930 13:57:46.884971 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="75416e10ef23bd39c1504a17e0c88380de42dad5cd46e707781f658bd1c680e0"
Sep 30 13:57:46 crc kubenswrapper[4783]: I0930 13:57:46.888508 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n642d" event={"ID":"4d7475fe-0651-421e-b8f2-a89f24f32779","Type":"ContainerStarted","Data":"cbcf49c87071faf2557e07f6701499dd081ccab24336ed164c8466cda6e7f0a8"}
Sep 30 13:57:46 crc kubenswrapper[4783]: I0930 13:57:46.986009 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": read tcp 10.217.0.2:34876->10.217.0.196:8775: read: connection reset by peer"
Sep 30 13:57:46 crc kubenswrapper[4783]: I0930 13:57:46.986068 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": read tcp 10.217.0.2:34886->10.217.0.196:8775: read: connection reset by peer"
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.017065 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.213190 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/042a6b62-e1ae-4b13-8dac-46cf8e244e63-logs\") pod \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") "
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.213602 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-config-data\") pod \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") "
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.213695 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-combined-ca-bundle\") pod \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") "
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.213735 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-public-tls-certs\") pod \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") "
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.213793 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-internal-tls-certs\") pod \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") "
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.213819 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mx9rw\" (UniqueName: \"kubernetes.io/projected/042a6b62-e1ae-4b13-8dac-46cf8e244e63-kube-api-access-mx9rw\") pod \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\" (UID: \"042a6b62-e1ae-4b13-8dac-46cf8e244e63\") "
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.214640 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/042a6b62-e1ae-4b13-8dac-46cf8e244e63-logs" (OuterVolumeSpecName: "logs") pod "042a6b62-e1ae-4b13-8dac-46cf8e244e63" (UID: "042a6b62-e1ae-4b13-8dac-46cf8e244e63"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.215467 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/042a6b62-e1ae-4b13-8dac-46cf8e244e63-logs\") on node \"crc\" DevicePath \"\""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.240935 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/042a6b62-e1ae-4b13-8dac-46cf8e244e63-kube-api-access-mx9rw" (OuterVolumeSpecName: "kube-api-access-mx9rw") pod "042a6b62-e1ae-4b13-8dac-46cf8e244e63" (UID: "042a6b62-e1ae-4b13-8dac-46cf8e244e63"). InnerVolumeSpecName "kube-api-access-mx9rw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.247790 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-config-data" (OuterVolumeSpecName: "config-data") pod "042a6b62-e1ae-4b13-8dac-46cf8e244e63" (UID: "042a6b62-e1ae-4b13-8dac-46cf8e244e63"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.259629 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "042a6b62-e1ae-4b13-8dac-46cf8e244e63" (UID: "042a6b62-e1ae-4b13-8dac-46cf8e244e63"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.271925 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "042a6b62-e1ae-4b13-8dac-46cf8e244e63" (UID: "042a6b62-e1ae-4b13-8dac-46cf8e244e63"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.285521 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "042a6b62-e1ae-4b13-8dac-46cf8e244e63" (UID: "042a6b62-e1ae-4b13-8dac-46cf8e244e63"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.317247 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.317288 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.317303 4783 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-public-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.317316 4783 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/042a6b62-e1ae-4b13-8dac-46cf8e244e63-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.317329 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mx9rw\" (UniqueName: \"kubernetes.io/projected/042a6b62-e1ae-4b13-8dac-46cf8e244e63-kube-api-access-mx9rw\") on node \"crc\" DevicePath \"\""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.351744 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.514584 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.520146 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-combined-ca-bundle\") pod \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\" (UID: \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\") "
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.520184 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6x8qx\" (UniqueName: \"kubernetes.io/projected/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-kube-api-access-6x8qx\") pod \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\" (UID: \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\") "
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.520245 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-config-data\") pod \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\" (UID: \"5a4f929b-b804-451c-9dd4-c9e2420b2c0b\") "
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.528338 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-kube-api-access-6x8qx" (OuterVolumeSpecName: "kube-api-access-6x8qx") pod "5a4f929b-b804-451c-9dd4-c9e2420b2c0b" (UID: "5a4f929b-b804-451c-9dd4-c9e2420b2c0b"). InnerVolumeSpecName "kube-api-access-6x8qx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.581163 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a4f929b-b804-451c-9dd4-c9e2420b2c0b" (UID: "5a4f929b-b804-451c-9dd4-c9e2420b2c0b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.602097 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-config-data" (OuterVolumeSpecName: "config-data") pod "5a4f929b-b804-451c-9dd4-c9e2420b2c0b" (UID: "5a4f929b-b804-451c-9dd4-c9e2420b2c0b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.621917 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-combined-ca-bundle\") pod \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") "
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.622018 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f3f45e8-14eb-423b-8aab-668a84f21c9e-logs\") pod \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") "
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.622065 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-nova-metadata-tls-certs\") pod \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") "
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.622095 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-config-data\") pod \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") "
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.622191 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tc8d7\" (UniqueName: \"kubernetes.io/projected/5f3f45e8-14eb-423b-8aab-668a84f21c9e-kube-api-access-tc8d7\") pod \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\" (UID: \"5f3f45e8-14eb-423b-8aab-668a84f21c9e\") "
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.623133 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.623202 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6x8qx\" (UniqueName: \"kubernetes.io/projected/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-kube-api-access-6x8qx\") on node \"crc\" DevicePath \"\""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.623274 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a4f929b-b804-451c-9dd4-c9e2420b2c0b-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.623291 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f3f45e8-14eb-423b-8aab-668a84f21c9e-logs" (OuterVolumeSpecName: "logs") pod "5f3f45e8-14eb-423b-8aab-668a84f21c9e" (UID: "5f3f45e8-14eb-423b-8aab-668a84f21c9e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.629684 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f3f45e8-14eb-423b-8aab-668a84f21c9e-kube-api-access-tc8d7" (OuterVolumeSpecName: "kube-api-access-tc8d7") pod "5f3f45e8-14eb-423b-8aab-668a84f21c9e" (UID: "5f3f45e8-14eb-423b-8aab-668a84f21c9e"). InnerVolumeSpecName "kube-api-access-tc8d7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.653453 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f3f45e8-14eb-423b-8aab-668a84f21c9e" (UID: "5f3f45e8-14eb-423b-8aab-668a84f21c9e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.664397 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-config-data" (OuterVolumeSpecName: "config-data") pod "5f3f45e8-14eb-423b-8aab-668a84f21c9e" (UID: "5f3f45e8-14eb-423b-8aab-668a84f21c9e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.684010 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "5f3f45e8-14eb-423b-8aab-668a84f21c9e" (UID: "5f3f45e8-14eb-423b-8aab-668a84f21c9e"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.725182 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.725270 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f3f45e8-14eb-423b-8aab-668a84f21c9e-logs\") on node \"crc\" DevicePath \"\""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.725289 4783 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.725305 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f3f45e8-14eb-423b-8aab-668a84f21c9e-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.725317 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tc8d7\" (UniqueName: \"kubernetes.io/projected/5f3f45e8-14eb-423b-8aab-668a84f21c9e-kube-api-access-tc8d7\") on node \"crc\" DevicePath \"\""
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.901970 4783 generic.go:334] "Generic (PLEG): container finished" podID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" containerID="abd6bb488dc2668134762a127ca305ad648a5b4a3a6203b918747f3350db34ba" exitCode=0
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.902293 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5f3f45e8-14eb-423b-8aab-668a84f21c9e","Type":"ContainerDied","Data":"abd6bb488dc2668134762a127ca305ad648a5b4a3a6203b918747f3350db34ba"}
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.902319 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5f3f45e8-14eb-423b-8aab-668a84f21c9e","Type":"ContainerDied","Data":"686e8122e29dd65e065a678c80154ea6197f21ddec951880f157574ec3ef8cbe"}
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.902338 4783 scope.go:117] "RemoveContainer" containerID="abd6bb488dc2668134762a127ca305ad648a5b4a3a6203b918747f3350db34ba"
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.902466 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.907846 4783 generic.go:334] "Generic (PLEG): container finished" podID="4d7475fe-0651-421e-b8f2-a89f24f32779" containerID="cbcf49c87071faf2557e07f6701499dd081ccab24336ed164c8466cda6e7f0a8" exitCode=0
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.907928 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n642d" event={"ID":"4d7475fe-0651-421e-b8f2-a89f24f32779","Type":"ContainerDied","Data":"cbcf49c87071faf2557e07f6701499dd081ccab24336ed164c8466cda6e7f0a8"}
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.913794 4783 generic.go:334] "Generic (PLEG): container finished" podID="5a4f929b-b804-451c-9dd4-c9e2420b2c0b" containerID="18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628" exitCode=0
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.913877 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5a4f929b-b804-451c-9dd4-c9e2420b2c0b","Type":"ContainerDied","Data":"18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628"}
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.913913 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.913942 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5a4f929b-b804-451c-9dd4-c9e2420b2c0b","Type":"ContainerDied","Data":"57f80f61e091ac2dfe95e3763b404635b161d9ed34844d4dbc21c73bf7c2ad18"}
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.913914 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.943920 4783 scope.go:117] "RemoveContainer" containerID="f72cd302f85dcf7dc37c0a07d719d5fa10b81a37de3b46e6cdb08482fba42040"
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.961444 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 13:57:47 crc kubenswrapper[4783]: I0930 13:57:47.989254 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.005351 4783 scope.go:117] "RemoveContainer" containerID="abd6bb488dc2668134762a127ca305ad648a5b4a3a6203b918747f3350db34ba"
Sep 30 13:57:48 crc kubenswrapper[4783]: E0930 13:57:48.005782 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abd6bb488dc2668134762a127ca305ad648a5b4a3a6203b918747f3350db34ba\": container with ID starting with abd6bb488dc2668134762a127ca305ad648a5b4a3a6203b918747f3350db34ba not found: ID does not exist" containerID="abd6bb488dc2668134762a127ca305ad648a5b4a3a6203b918747f3350db34ba"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.005815 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abd6bb488dc2668134762a127ca305ad648a5b4a3a6203b918747f3350db34ba"} err="failed to get container status \"abd6bb488dc2668134762a127ca305ad648a5b4a3a6203b918747f3350db34ba\": rpc error: code = NotFound desc = could not find container \"abd6bb488dc2668134762a127ca305ad648a5b4a3a6203b918747f3350db34ba\": container with ID starting with abd6bb488dc2668134762a127ca305ad648a5b4a3a6203b918747f3350db34ba not found: ID does not exist"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.005842 4783 scope.go:117] "RemoveContainer" containerID="f72cd302f85dcf7dc37c0a07d719d5fa10b81a37de3b46e6cdb08482fba42040"
Sep 30 13:57:48 crc kubenswrapper[4783]: E0930 13:57:48.006081 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f72cd302f85dcf7dc37c0a07d719d5fa10b81a37de3b46e6cdb08482fba42040\": container with ID starting with f72cd302f85dcf7dc37c0a07d719d5fa10b81a37de3b46e6cdb08482fba42040 not found: ID does not exist" containerID="f72cd302f85dcf7dc37c0a07d719d5fa10b81a37de3b46e6cdb08482fba42040"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.006103 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f72cd302f85dcf7dc37c0a07d719d5fa10b81a37de3b46e6cdb08482fba42040"} err="failed to get container status \"f72cd302f85dcf7dc37c0a07d719d5fa10b81a37de3b46e6cdb08482fba42040\": rpc error: code = NotFound desc = could not find container \"f72cd302f85dcf7dc37c0a07d719d5fa10b81a37de3b46e6cdb08482fba42040\": container with ID starting with f72cd302f85dcf7dc37c0a07d719d5fa10b81a37de3b46e6cdb08482fba42040 not found: ID does not exist"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.006120 4783 scope.go:117] "RemoveContainer" containerID="18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.007025 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 13:57:48 crc kubenswrapper[4783]: E0930 13:57:48.007531 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="042a6b62-e1ae-4b13-8dac-46cf8e244e63" containerName="nova-api-log"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.007548 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="042a6b62-e1ae-4b13-8dac-46cf8e244e63" containerName="nova-api-log"
Sep 30 13:57:48 crc kubenswrapper[4783]: E0930 13:57:48.007571 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" containerName="nova-metadata-metadata"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.007579 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" containerName="nova-metadata-metadata"
Sep 30 13:57:48 crc kubenswrapper[4783]: E0930 13:57:48.007600 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="042a6b62-e1ae-4b13-8dac-46cf8e244e63" containerName="nova-api-api"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.007607 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="042a6b62-e1ae-4b13-8dac-46cf8e244e63" containerName="nova-api-api"
Sep 30 13:57:48 crc kubenswrapper[4783]: E0930 13:57:48.007626 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" containerName="nova-metadata-log"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.007633 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" containerName="nova-metadata-log"
Sep 30 13:57:48 crc kubenswrapper[4783]: E0930 13:57:48.007641 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a4f929b-b804-451c-9dd4-c9e2420b2c0b" containerName="nova-scheduler-scheduler"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.007647 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a4f929b-b804-451c-9dd4-c9e2420b2c0b" containerName="nova-scheduler-scheduler"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.007891 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="042a6b62-e1ae-4b13-8dac-46cf8e244e63" containerName="nova-api-api"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.007914 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a4f929b-b804-451c-9dd4-c9e2420b2c0b" containerName="nova-scheduler-scheduler"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.007927 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" containerName="nova-metadata-log"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.007948 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" containerName="nova-metadata-metadata"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.007961 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="042a6b62-e1ae-4b13-8dac-46cf8e244e63" containerName="nova-api-log"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.009931 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.016794 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.017103 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.039364 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.049242 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.050405 4783 scope.go:117] "RemoveContainer" containerID="18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.057431 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 13:57:48 crc kubenswrapper[4783]: E0930 13:57:48.058214 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628\": container with ID starting with 18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628 not found: ID does not exist" containerID="18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.058273 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628"} err="failed to get container status \"18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628\": rpc error: code = NotFound desc = could not find container \"18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628\": container with ID starting with 18aeab0233fb2606a1b8d4b314136f935b9e60a7da0282d0778cb68b729b7628 not found: ID does not exist"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.065021 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.072229 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.082371 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.083847 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.087493 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.087566 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.088684 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.094327 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.107708 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.109386 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.112081 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.134705 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.135584 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.135613 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-config-data\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.135657 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.135774 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.135836 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2tcr\" (UniqueName: \"kubernetes.io/projected/de5783b8-dd5d-4570-ada8-5b1775a75813-kube-api-access-k2tcr\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.135863 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-config-data\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.135895 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de5783b8-dd5d-4570-ada8-5b1775a75813-logs\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.135964 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkrvv\" (UniqueName: \"kubernetes.io/projected/990cfb5a-6508-4344-9df7-391f55a70bd8-kube-api-access-qkrvv\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.136007 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.136062 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-public-tls-certs\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.136193 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/990cfb5a-6508-4344-9df7-391f55a70bd8-logs\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.238001 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.238709 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-config-data\") pod \"nova-scheduler-0\" (UID: \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\") " pod="openstack/nova-scheduler-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.238971 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2tcr\" (UniqueName: \"kubernetes.io/projected/de5783b8-dd5d-4570-ada8-5b1775a75813-kube-api-access-k2tcr\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.239262 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-config-data\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.239487 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de5783b8-dd5d-4570-ada8-5b1775a75813-logs\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.239686 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkrvv\" (UniqueName: \"kubernetes.io/projected/990cfb5a-6508-4344-9df7-391f55a70bd8-kube-api-access-qkrvv\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.239879 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-public-tls-certs\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.240167 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cspcg\" (UniqueName: \"kubernetes.io/projected/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-kube-api-access-cspcg\") pod \"nova-scheduler-0\" (UID: \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\") " pod="openstack/nova-scheduler-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.240031 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de5783b8-dd5d-4570-ada8-5b1775a75813-logs\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.240418 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/990cfb5a-6508-4344-9df7-391f55a70bd8-logs\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.240713 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.240962 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\") " pod="openstack/nova-scheduler-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.241099 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/990cfb5a-6508-4344-9df7-391f55a70bd8-logs\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.241357 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.241523 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-config-data\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.241754 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.242830 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-config-data\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.242855 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.244566 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.244709 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-config-data\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.244709 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-public-tls-certs\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.245682 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.248735 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.255083 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2tcr\" (UniqueName: \"kubernetes.io/projected/de5783b8-dd5d-4570-ada8-5b1775a75813-kube-api-access-k2tcr\") pod \"nova-metadata-0\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.264728 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkrvv\" (UniqueName: \"kubernetes.io/projected/990cfb5a-6508-4344-9df7-391f55a70bd8-kube-api-access-qkrvv\") pod \"nova-api-0\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.343870 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cspcg\" (UniqueName: \"kubernetes.io/projected/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-kube-api-access-cspcg\") pod \"nova-scheduler-0\" (UID: \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\") " pod="openstack/nova-scheduler-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.343993 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\") " pod="openstack/nova-scheduler-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.344145 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-config-data\") pod \"nova-scheduler-0\" (UID: \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\") " pod="openstack/nova-scheduler-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.344346 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.348114 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\") " pod="openstack/nova-scheduler-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.360297 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-config-data\") pod \"nova-scheduler-0\" (UID: \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\") " pod="openstack/nova-scheduler-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.369902 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cspcg\" (UniqueName: \"kubernetes.io/projected/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-kube-api-access-cspcg\") pod \"nova-scheduler-0\" (UID: \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\") " pod="openstack/nova-scheduler-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.414588 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.427439 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.856432 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="042a6b62-e1ae-4b13-8dac-46cf8e244e63" path="/var/lib/kubelet/pods/042a6b62-e1ae-4b13-8dac-46cf8e244e63/volumes"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.858240 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a4f929b-b804-451c-9dd4-c9e2420b2c0b" path="/var/lib/kubelet/pods/5a4f929b-b804-451c-9dd4-c9e2420b2c0b/volumes"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.859813 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f3f45e8-14eb-423b-8aab-668a84f21c9e" path="/var/lib/kubelet/pods/5f3f45e8-14eb-423b-8aab-668a84f21c9e/volumes"
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.860432 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.925365 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n642d" event={"ID":"4d7475fe-0651-421e-b8f2-a89f24f32779","Type":"ContainerStarted","Data":"c42025b8d4dab0a4d77d39eef9ea7922bcde86824787e0601b2654661bbf3b21"}
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.933744 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"de5783b8-dd5d-4570-ada8-5b1775a75813","Type":"ContainerStarted","Data":"b4fa04f51c2a8427c20ea0732dd8d2f083b65b6a74df54fd584091c2a91a309a"}
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.945305 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 13:57:48 crc kubenswrapper[4783]: W0930 13:57:48.960163 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0280c83_c3f5_45d6_abb4_df04dbeed8e3.slice/crio-eb422180d6c2172a15a7791d0b32720cc74bfe22376ccdf6707ae39cbdd2098d WatchSource:0}: Error finding container eb422180d6c2172a15a7791d0b32720cc74bfe22376ccdf6707ae39cbdd2098d: Status 404 returned error can't find the container with id eb422180d6c2172a15a7791d0b32720cc74bfe22376ccdf6707ae39cbdd2098d
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.961168 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-n642d" podStartSLOduration=3.163147446 podStartE2EDuration="6.961151223s" podCreationTimestamp="2025-09-30 13:57:42 +0000 UTC" firstStartedPulling="2025-09-30 13:57:44.871899009 +0000 UTC m=+1364.803365316" lastFinishedPulling="2025-09-30 13:57:48.669902786 +0000 UTC m=+1368.601369093" observedRunningTime="2025-09-30 13:57:48.955507781 +0000 UTC m=+1368.886974088" watchObservedRunningTime="2025-09-30 13:57:48.961151223 +0000 UTC m=+1368.892617530"
Sep 30 13:57:48 crc kubenswrapper[4783]: W0930 13:57:48.964341 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod990cfb5a_6508_4344_9df7_391f55a70bd8.slice/crio-7ca31d61a9efdb8c98cca81ee99089ae9fa0855a0d0600b941c46d33ddf7b793 WatchSource:0}: Error finding container 7ca31d61a9efdb8c98cca81ee99089ae9fa0855a0d0600b941c46d33ddf7b793: Status 404 returned error can't find the container with id 7ca31d61a9efdb8c98cca81ee99089ae9fa0855a0d0600b941c46d33ddf7b793
Sep 30 13:57:48 crc kubenswrapper[4783]: I0930 13:57:48.976017 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 13:57:49 crc kubenswrapper[4783]: I0930 13:57:49.944551 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"990cfb5a-6508-4344-9df7-391f55a70bd8","Type":"ContainerStarted","Data":"a2bad418a29881b7122fefd4cd227ec191c29a702e6757d67cb0615fa138b8f9"}
Sep 30 13:57:49 crc kubenswrapper[4783]: I0930 13:57:49.944930 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"990cfb5a-6508-4344-9df7-391f55a70bd8","Type":"ContainerStarted","Data":"23388cedb834ec06780dc2a580b02b9fda46fcb79fc7f70e7cc73f244c70f5ed"}
Sep 30 13:57:49 crc kubenswrapper[4783]: I0930 13:57:49.944971 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"990cfb5a-6508-4344-9df7-391f55a70bd8","Type":"ContainerStarted","Data":"7ca31d61a9efdb8c98cca81ee99089ae9fa0855a0d0600b941c46d33ddf7b793"}
Sep 30 13:57:49 crc kubenswrapper[4783]: I0930 13:57:49.947278 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d0280c83-c3f5-45d6-abb4-df04dbeed8e3","Type":"ContainerStarted","Data":"c39ea6e468f1e1a6206c2bea75db09565a04fd0520a06ccc04f611cbaa92fa23"}
Sep 30 13:57:49 crc kubenswrapper[4783]: I0930 13:57:49.947304 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d0280c83-c3f5-45d6-abb4-df04dbeed8e3","Type":"ContainerStarted","Data":"eb422180d6c2172a15a7791d0b32720cc74bfe22376ccdf6707ae39cbdd2098d"}
Sep 30 13:57:49 crc kubenswrapper[4783]: I0930 13:57:49.949113 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"de5783b8-dd5d-4570-ada8-5b1775a75813","Type":"ContainerStarted","Data":"66c8744ce1f77267319fe7155ecbf63253ea5719037e333e1d1c36b06e0ed433"}
Sep 30 13:57:49 crc kubenswrapper[4783]: I0930 13:57:49.949134 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"de5783b8-dd5d-4570-ada8-5b1775a75813","Type":"ContainerStarted","Data":"95317c362e4cc6f8f466aada85a08023d39d7a1431f188a48a81981454a121fc"}
Sep 30 13:57:49 crc kubenswrapper[4783]: I0930 13:57:49.970398 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.970380951 podStartE2EDuration="2.970380951s" podCreationTimestamp="2025-09-30 13:57:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:57:49.966836976 +0000 UTC m=+1369.898303293" watchObservedRunningTime="2025-09-30 13:57:49.970380951 +0000 UTC m=+1369.901847258"
Sep 30 13:57:49 crc kubenswrapper[4783]: I0930 13:57:49.991369 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.991350566 podStartE2EDuration="1.991350566s" podCreationTimestamp="2025-09-30 13:57:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:57:49.987082959 +0000 UTC m=+1369.918549276" watchObservedRunningTime="2025-09-30 13:57:49.991350566 +0000 UTC m=+1369.922816873"
Sep 30 13:57:50 crc kubenswrapper[4783]: I0930 13:57:50.016698 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.016672802 podStartE2EDuration="3.016672802s" podCreationTimestamp="2025-09-30 13:57:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 13:57:50.010248386 +0000 UTC m=+1369.941714703" watchObservedRunningTime="2025-09-30 13:57:50.016672802 +0000 UTC m=+1369.948139109"
Sep 30 13:57:53 crc kubenswrapper[4783]: I0930 13:57:53.342997 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:57:53 crc kubenswrapper[4783]: I0930 13:57:53.343744 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:57:53 crc kubenswrapper[4783]: I0930 13:57:53.344524 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 30 13:57:53 crc kubenswrapper[4783]: I0930 13:57:53.344567 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 30 13:57:53 crc kubenswrapper[4783]: I0930 13:57:53.427628 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Sep 30 13:57:54 crc kubenswrapper[4783]: I0930 13:57:54.405828 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-n642d" podUID="4d7475fe-0651-421e-b8f2-a89f24f32779" containerName="registry-server" probeResult="failure" output=<
Sep 30 13:57:54 crc kubenswrapper[4783]: timeout: failed to connect service ":50051" within 1s
Sep 30 13:57:54 crc kubenswrapper[4783]: >
Sep 30 13:57:58 crc kubenswrapper[4783]: I0930 13:57:58.345176 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 30 13:57:58 crc kubenswrapper[4783]: I0930 13:57:58.345839 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 30 13:57:58 crc kubenswrapper[4783]: I0930 13:57:58.415284 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 30 13:57:58 crc kubenswrapper[4783]: I0930 13:57:58.415396 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 30 13:57:58 crc kubenswrapper[4783]: I0930 13:57:58.428424 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Sep 30 13:57:58 crc kubenswrapper[4783]: I0930 13:57:58.463119 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Sep 30 13:57:59 crc kubenswrapper[4783]: I0930 13:57:59.076692 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Sep 30 13:57:59 crc kubenswrapper[4783]: I0930 13:57:59.087785 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Sep 30 13:57:59 crc kubenswrapper[4783]: I0930 13:57:59.364496 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="de5783b8-dd5d-4570-ada8-5b1775a75813" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 13:57:59 crc kubenswrapper[4783]: I0930 13:57:59.365102 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="de5783b8-dd5d-4570-ada8-5b1775a75813" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 13:57:59 crc kubenswrapper[4783]: I0930 13:57:59.432453 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="990cfb5a-6508-4344-9df7-391f55a70bd8" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 13:57:59 crc kubenswrapper[4783]: I0930 13:57:59.432460 4783 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="990cfb5a-6508-4344-9df7-391f55a70bd8" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 13:58:03 crc kubenswrapper[4783]: I0930 13:58:03.418904 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:58:03 crc kubenswrapper[4783]: I0930 13:58:03.494651 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:58:03 crc kubenswrapper[4783]: I0930 13:58:03.660506 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n642d"]
Sep 30 13:58:05 crc kubenswrapper[4783]: I0930 13:58:05.115575 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-n642d" podUID="4d7475fe-0651-421e-b8f2-a89f24f32779" containerName="registry-server" containerID="cri-o://c42025b8d4dab0a4d77d39eef9ea7922bcde86824787e0601b2654661bbf3b21" gracePeriod=2
Sep 30 13:58:05 crc kubenswrapper[4783]: I0930 13:58:05.569960 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n642d"
Sep 30 13:58:05 crc kubenswrapper[4783]: I0930 13:58:05.693567 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d7475fe-0651-421e-b8f2-a89f24f32779-catalog-content\") pod \"4d7475fe-0651-421e-b8f2-a89f24f32779\" (UID: \"4d7475fe-0651-421e-b8f2-a89f24f32779\") "
Sep 30 13:58:05 crc kubenswrapper[4783]: I0930 13:58:05.693655 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45wck\" (UniqueName: \"kubernetes.io/projected/4d7475fe-0651-421e-b8f2-a89f24f32779-kube-api-access-45wck\") pod \"4d7475fe-0651-421e-b8f2-a89f24f32779\" (UID: \"4d7475fe-0651-421e-b8f2-a89f24f32779\") "
Sep 30 13:58:05 crc kubenswrapper[4783]: I0930 13:58:05.693825 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d7475fe-0651-421e-b8f2-a89f24f32779-utilities\") pod \"4d7475fe-0651-421e-b8f2-a89f24f32779\" (UID: \"4d7475fe-0651-421e-b8f2-a89f24f32779\") "
Sep 30 13:58:05 crc kubenswrapper[4783]: I0930 13:58:05.694408 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d7475fe-0651-421e-b8f2-a89f24f32779-utilities" (OuterVolumeSpecName: "utilities") pod "4d7475fe-0651-421e-b8f2-a89f24f32779" (UID: "4d7475fe-0651-421e-b8f2-a89f24f32779"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:58:05 crc kubenswrapper[4783]: I0930 13:58:05.703410 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d7475fe-0651-421e-b8f2-a89f24f32779-kube-api-access-45wck" (OuterVolumeSpecName: "kube-api-access-45wck") pod "4d7475fe-0651-421e-b8f2-a89f24f32779" (UID: "4d7475fe-0651-421e-b8f2-a89f24f32779"). InnerVolumeSpecName "kube-api-access-45wck". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:58:05 crc kubenswrapper[4783]: I0930 13:58:05.769240 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d7475fe-0651-421e-b8f2-a89f24f32779-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d7475fe-0651-421e-b8f2-a89f24f32779" (UID: "4d7475fe-0651-421e-b8f2-a89f24f32779"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:58:05 crc kubenswrapper[4783]: I0930 13:58:05.795839 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d7475fe-0651-421e-b8f2-a89f24f32779-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:05 crc kubenswrapper[4783]: I0930 13:58:05.795917 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45wck\" (UniqueName: \"kubernetes.io/projected/4d7475fe-0651-421e-b8f2-a89f24f32779-kube-api-access-45wck\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:05 crc kubenswrapper[4783]: I0930 13:58:05.795938 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d7475fe-0651-421e-b8f2-a89f24f32779-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.131164 4783 generic.go:334] "Generic (PLEG): container finished" podID="4d7475fe-0651-421e-b8f2-a89f24f32779" containerID="c42025b8d4dab0a4d77d39eef9ea7922bcde86824787e0601b2654661bbf3b21" exitCode=0
Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.131212 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n642d" event={"ID":"4d7475fe-0651-421e-b8f2-a89f24f32779","Type":"ContainerDied","Data":"c42025b8d4dab0a4d77d39eef9ea7922bcde86824787e0601b2654661bbf3b21"}
Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.131297 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n642d" event={"ID":"4d7475fe-0651-421e-b8f2-a89f24f32779","Type":"ContainerDied","Data":"f4349601b7aee08b2675ae4f11dfba3304c808077bef3f4675ff2267e1adc444"}
Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.131321 4783 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-operators-n642d" Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.131326 4783 scope.go:117] "RemoveContainer" containerID="c42025b8d4dab0a4d77d39eef9ea7922bcde86824787e0601b2654661bbf3b21" Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.153448 4783 scope.go:117] "RemoveContainer" containerID="cbcf49c87071faf2557e07f6701499dd081ccab24336ed164c8466cda6e7f0a8" Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.170483 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n642d"] Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.178475 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-n642d"] Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.188314 4783 scope.go:117] "RemoveContainer" containerID="0c4bbf1339d58b2c3f3092b53fd4dfca6ca31c3fa295137e4424d23331134b48" Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.214883 4783 scope.go:117] "RemoveContainer" containerID="c42025b8d4dab0a4d77d39eef9ea7922bcde86824787e0601b2654661bbf3b21" Sep 30 13:58:06 crc kubenswrapper[4783]: E0930 13:58:06.215247 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c42025b8d4dab0a4d77d39eef9ea7922bcde86824787e0601b2654661bbf3b21\": container with ID starting with c42025b8d4dab0a4d77d39eef9ea7922bcde86824787e0601b2654661bbf3b21 not found: ID does not exist" containerID="c42025b8d4dab0a4d77d39eef9ea7922bcde86824787e0601b2654661bbf3b21" Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.215292 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c42025b8d4dab0a4d77d39eef9ea7922bcde86824787e0601b2654661bbf3b21"} err="failed to get container status \"c42025b8d4dab0a4d77d39eef9ea7922bcde86824787e0601b2654661bbf3b21\": rpc error: code = NotFound desc = could not find container \"c42025b8d4dab0a4d77d39eef9ea7922bcde86824787e0601b2654661bbf3b21\": container with ID starting with c42025b8d4dab0a4d77d39eef9ea7922bcde86824787e0601b2654661bbf3b21 not found: ID does not exist" Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.215398 4783 scope.go:117] "RemoveContainer" containerID="cbcf49c87071faf2557e07f6701499dd081ccab24336ed164c8466cda6e7f0a8" Sep 30 13:58:06 crc kubenswrapper[4783]: E0930 13:58:06.215738 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbcf49c87071faf2557e07f6701499dd081ccab24336ed164c8466cda6e7f0a8\": container with ID starting with cbcf49c87071faf2557e07f6701499dd081ccab24336ed164c8466cda6e7f0a8 not found: ID does not exist" containerID="cbcf49c87071faf2557e07f6701499dd081ccab24336ed164c8466cda6e7f0a8" Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.215766 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbcf49c87071faf2557e07f6701499dd081ccab24336ed164c8466cda6e7f0a8"} err="failed to get container status \"cbcf49c87071faf2557e07f6701499dd081ccab24336ed164c8466cda6e7f0a8\": rpc error: code = NotFound desc = could not find container \"cbcf49c87071faf2557e07f6701499dd081ccab24336ed164c8466cda6e7f0a8\": container with ID starting with cbcf49c87071faf2557e07f6701499dd081ccab24336ed164c8466cda6e7f0a8 not found: ID does not exist" Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.215789 4783 scope.go:117] "RemoveContainer" 
containerID="0c4bbf1339d58b2c3f3092b53fd4dfca6ca31c3fa295137e4424d23331134b48" Sep 30 13:58:06 crc kubenswrapper[4783]: E0930 13:58:06.216054 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c4bbf1339d58b2c3f3092b53fd4dfca6ca31c3fa295137e4424d23331134b48\": container with ID starting with 0c4bbf1339d58b2c3f3092b53fd4dfca6ca31c3fa295137e4424d23331134b48 not found: ID does not exist" containerID="0c4bbf1339d58b2c3f3092b53fd4dfca6ca31c3fa295137e4424d23331134b48" Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.216079 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c4bbf1339d58b2c3f3092b53fd4dfca6ca31c3fa295137e4424d23331134b48"} err="failed to get container status \"0c4bbf1339d58b2c3f3092b53fd4dfca6ca31c3fa295137e4424d23331134b48\": rpc error: code = NotFound desc = could not find container \"0c4bbf1339d58b2c3f3092b53fd4dfca6ca31c3fa295137e4424d23331134b48\": container with ID starting with 0c4bbf1339d58b2c3f3092b53fd4dfca6ca31c3fa295137e4424d23331134b48 not found: ID does not exist" Sep 30 13:58:06 crc kubenswrapper[4783]: I0930 13:58:06.855039 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d7475fe-0651-421e-b8f2-a89f24f32779" path="/var/lib/kubelet/pods/4d7475fe-0651-421e-b8f2-a89f24f32779/volumes" Sep 30 13:58:07 crc kubenswrapper[4783]: I0930 13:58:07.674333 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:58:07 crc kubenswrapper[4783]: I0930 13:58:07.674404 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:58:08 crc kubenswrapper[4783]: I0930 13:58:08.352455 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 13:58:08 crc kubenswrapper[4783]: I0930 13:58:08.355201 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 13:58:08 crc kubenswrapper[4783]: I0930 13:58:08.360194 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 13:58:08 crc kubenswrapper[4783]: I0930 13:58:08.423819 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 13:58:08 crc kubenswrapper[4783]: I0930 13:58:08.424344 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 13:58:08 crc kubenswrapper[4783]: I0930 13:58:08.438306 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 13:58:08 crc kubenswrapper[4783]: I0930 13:58:08.439366 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 13:58:09 crc kubenswrapper[4783]: I0930 13:58:09.168184 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 13:58:09 crc kubenswrapper[4783]: I0930 13:58:09.176646 4783 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 13:58:09 crc kubenswrapper[4783]: I0930 13:58:09.176739 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.517789 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.519520 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="05290e3e-89c9-4073-96b6-e97a289f4431" containerName="openstackclient" containerID="cri-o://8499ab2b955764e172baad551be4e5d6185f96063acbf96197574d2eb2b928d4" gracePeriod=2 Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.555569 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.583302 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-9w2wl"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.583566 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-9w2wl" podUID="801ddf87-455e-4941-8637-4c2f5da49d41" containerName="openstack-network-exporter" containerID="cri-o://25fabd726430aea0cdf79b31be6e26feb4aede29cf3f33ef9ba4f73371bf719a" gracePeriod=30 Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.608468 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.648642 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutronac9c-account-delete-9kkgx"] Sep 30 13:58:28 crc kubenswrapper[4783]: E0930 13:58:28.649043 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d7475fe-0651-421e-b8f2-a89f24f32779" containerName="extract-utilities" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.649058 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d7475fe-0651-421e-b8f2-a89f24f32779" containerName="extract-utilities" Sep 30 13:58:28 crc kubenswrapper[4783]: E0930 13:58:28.649075 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d7475fe-0651-421e-b8f2-a89f24f32779" containerName="extract-content" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.649082 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d7475fe-0651-421e-b8f2-a89f24f32779" containerName="extract-content" Sep 30 13:58:28 crc kubenswrapper[4783]: E0930 13:58:28.649090 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05290e3e-89c9-4073-96b6-e97a289f4431" containerName="openstackclient" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.649096 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="05290e3e-89c9-4073-96b6-e97a289f4431" containerName="openstackclient" Sep 30 13:58:28 crc kubenswrapper[4783]: E0930 13:58:28.649121 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d7475fe-0651-421e-b8f2-a89f24f32779" containerName="registry-server" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.649127 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d7475fe-0651-421e-b8f2-a89f24f32779" containerName="registry-server" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.649316 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="05290e3e-89c9-4073-96b6-e97a289f4431" containerName="openstackclient" Sep 30 13:58:28 
crc kubenswrapper[4783]: I0930 13:58:28.649333 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d7475fe-0651-421e-b8f2-a89f24f32779" containerName="registry-server" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.649973 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutronac9c-account-delete-9kkgx" Sep 30 13:58:28 crc kubenswrapper[4783]: E0930 13:58:28.675761 4783 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Sep 30 13:58:28 crc kubenswrapper[4783]: E0930 13:58:28.675964 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data podName:b901a1db-0fb0-4d58-be99-fdfd812683e6 nodeName:}" failed. No retries permitted until 2025-09-30 13:58:29.175947525 +0000 UTC m=+1409.107413832 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data") pod "rabbitmq-cell1-server-0" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6") : configmap "rabbitmq-cell1-config-data" not found Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.682184 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-th6r6"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.717907 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-ttc29"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.772339 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutronac9c-account-delete-9kkgx"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.784406 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxvcg\" (UniqueName: \"kubernetes.io/projected/08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf-kube-api-access-kxvcg\") pod \"neutronac9c-account-delete-9kkgx\" (UID: \"08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf\") " pod="openstack/neutronac9c-account-delete-9kkgx" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.809586 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.809872 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="3e0048e0-a916-434d-abd4-571cec7d4b6a" containerName="ovn-northd" containerID="cri-o://35f523ca250ac79ac4541561752eaf87ea314b3758bd92036bb084eef35aa318" gracePeriod=30 Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.810022 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="3e0048e0-a916-434d-abd4-571cec7d4b6a" containerName="openstack-network-exporter" containerID="cri-o://ad940715dd3642761ec895a6d3116d512bb6c980dd421be308131af63b9114b0" gracePeriod=30 Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.818000 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder230c-account-delete-xk8kk"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.819259 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder230c-account-delete-xk8kk" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.830667 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder230c-account-delete-xk8kk"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.842300 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.877146 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement92eb-account-delete-tcr2d"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.879001 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement92eb-account-delete-tcr2d"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.879130 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement92eb-account-delete-tcr2d" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.882674 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-wvfc5"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.885945 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxvcg\" (UniqueName: \"kubernetes.io/projected/08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf-kube-api-access-kxvcg\") pod \"neutronac9c-account-delete-9kkgx\" (UID: \"08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf\") " pod="openstack/neutronac9c-account-delete-9kkgx" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.886371 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2nvg\" (UniqueName: \"kubernetes.io/projected/5026d481-7d2b-40cd-8369-17892ed22c77-kube-api-access-m2nvg\") pod \"cinder230c-account-delete-xk8kk\" (UID: \"5026d481-7d2b-40cd-8369-17892ed22c77\") " pod="openstack/cinder230c-account-delete-xk8kk" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.912469 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-wvfc5"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.916434 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxvcg\" (UniqueName: \"kubernetes.io/projected/08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf-kube-api-access-kxvcg\") pod \"neutronac9c-account-delete-9kkgx\" (UID: \"08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf\") " pod="openstack/neutronac9c-account-delete-9kkgx" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.941864 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-qcbqm"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.974046 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-qcbqm"] Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.988213 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfllx\" (UniqueName: \"kubernetes.io/projected/957739b9-90a0-43bf-a5a4-9558993b660f-kube-api-access-zfllx\") pod \"placement92eb-account-delete-tcr2d\" (UID: \"957739b9-90a0-43bf-a5a4-9558993b660f\") " pod="openstack/placement92eb-account-delete-tcr2d" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.988550 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2nvg\" (UniqueName: \"kubernetes.io/projected/5026d481-7d2b-40cd-8369-17892ed22c77-kube-api-access-m2nvg\") pod \"cinder230c-account-delete-xk8kk\" (UID: 
\"5026d481-7d2b-40cd-8369-17892ed22c77\") " pod="openstack/cinder230c-account-delete-xk8kk" Sep 30 13:58:28 crc kubenswrapper[4783]: I0930 13:58:28.992848 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutronac9c-account-delete-9kkgx" Sep 30 13:58:28 crc kubenswrapper[4783]: E0930 13:58:28.993854 4783 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 30 13:58:28 crc kubenswrapper[4783]: E0930 13:58:28.993910 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data podName:164c5743-32f5-4347-9c9d-20d28f1f2dce nodeName:}" failed. No retries permitted until 2025-09-30 13:58:29.493892333 +0000 UTC m=+1409.425358640 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data") pod "rabbitmq-server-0" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce") : configmap "rabbitmq-config-data" not found Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.016261 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-th9hn"] Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.022927 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2nvg\" (UniqueName: \"kubernetes.io/projected/5026d481-7d2b-40cd-8369-17892ed22c77-kube-api-access-m2nvg\") pod \"cinder230c-account-delete-xk8kk\" (UID: \"5026d481-7d2b-40cd-8369-17892ed22c77\") " pod="openstack/cinder230c-account-delete-xk8kk" Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.035754 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-th9hn"] Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.083610 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.092912 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfllx\" (UniqueName: \"kubernetes.io/projected/957739b9-90a0-43bf-a5a4-9558993b660f-kube-api-access-zfllx\") pod \"placement92eb-account-delete-tcr2d\" (UID: \"957739b9-90a0-43bf-a5a4-9558993b660f\") " pod="openstack/placement92eb-account-delete-tcr2d" Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.105399 4783 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-th6r6" message="Exiting ovn-controller (1) " Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.105436 4783 kuberuntime_container.go:691] "PreStop hook failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " pod="openstack/ovn-controller-th6r6" podUID="8bc852c2-c59b-4b84-bbfc-c8b62354c66d" containerName="ovn-controller" containerID="cri-o://78d4d2f406a94d848bdcacabbf7c6fb21ceabcfec8d6cbb5135e09f32718484b" Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.105469 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-th6r6" podUID="8bc852c2-c59b-4b84-bbfc-c8b62354c66d" containerName="ovn-controller" containerID="cri-o://78d4d2f406a94d848bdcacabbf7c6fb21ceabcfec8d6cbb5135e09f32718484b" gracePeriod=30 Sep 30 13:58:29 
crc kubenswrapper[4783]: I0930 13:58:29.120733 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfllx\" (UniqueName: \"kubernetes.io/projected/957739b9-90a0-43bf-a5a4-9558993b660f-kube-api-access-zfllx\") pod \"placement92eb-account-delete-tcr2d\" (UID: \"957739b9-90a0-43bf-a5a4-9558993b660f\") " pod="openstack/placement92eb-account-delete-tcr2d" Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.171385 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder230c-account-delete-xk8kk" Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.194522 4783 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.194861 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data podName:b901a1db-0fb0-4d58-be99-fdfd812683e6 nodeName:}" failed. No retries permitted until 2025-09-30 13:58:30.194842829 +0000 UTC m=+1410.126309136 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data") pod "rabbitmq-cell1-server-0" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6") : configmap "rabbitmq-cell1-config-data" not found Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.214623 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement92eb-account-delete-tcr2d" Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.252202 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-mvmd6"] Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.265870 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-mvmd6"] Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.328684 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.329666 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" containerName="openstack-network-exporter" containerID="cri-o://f963b067d2255b5af1022d6dd485fe3e8d7ab5715354c349f168effd5f9bdcd4" gracePeriod=300 Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.390930 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 78d4d2f406a94d848bdcacabbf7c6fb21ceabcfec8d6cbb5135e09f32718484b is running failed: container process not found" containerID="78d4d2f406a94d848bdcacabbf7c6fb21ceabcfec8d6cbb5135e09f32718484b" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.392287 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 78d4d2f406a94d848bdcacabbf7c6fb21ceabcfec8d6cbb5135e09f32718484b is running failed: container process not found" containerID="78d4d2f406a94d848bdcacabbf7c6fb21ceabcfec8d6cbb5135e09f32718484b" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.393201 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc 
= container is not created or running: checking if PID of 78d4d2f406a94d848bdcacabbf7c6fb21ceabcfec8d6cbb5135e09f32718484b is running failed: container process not found" containerID="78d4d2f406a94d848bdcacabbf7c6fb21ceabcfec8d6cbb5135e09f32718484b" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.393278 4783 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 78d4d2f406a94d848bdcacabbf7c6fb21ceabcfec8d6cbb5135e09f32718484b is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-th6r6" podUID="8bc852c2-c59b-4b84-bbfc-c8b62354c66d" containerName="ovn-controller" Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.393475 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.409896 4783 generic.go:334] "Generic (PLEG): container finished" podID="8bc852c2-c59b-4b84-bbfc-c8b62354c66d" containerID="78d4d2f406a94d848bdcacabbf7c6fb21ceabcfec8d6cbb5135e09f32718484b" exitCode=0 Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.410002 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-th6r6" event={"ID":"8bc852c2-c59b-4b84-bbfc-c8b62354c66d","Type":"ContainerDied","Data":"78d4d2f406a94d848bdcacabbf7c6fb21ceabcfec8d6cbb5135e09f32718484b"} Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.457912 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_3e0048e0-a916-434d-abd4-571cec7d4b6a/ovn-northd/0.log" Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.457957 4783 generic.go:334] "Generic (PLEG): container finished" podID="3e0048e0-a916-434d-abd4-571cec7d4b6a" containerID="ad940715dd3642761ec895a6d3116d512bb6c980dd421be308131af63b9114b0" exitCode=2 Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.457973 4783 generic.go:334] "Generic (PLEG): container finished" podID="3e0048e0-a916-434d-abd4-571cec7d4b6a" containerID="35f523ca250ac79ac4541561752eaf87ea314b3758bd92036bb084eef35aa318" exitCode=143 Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.458038 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"3e0048e0-a916-434d-abd4-571cec7d4b6a","Type":"ContainerDied","Data":"ad940715dd3642761ec895a6d3116d512bb6c980dd421be308131af63b9114b0"} Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.458067 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"3e0048e0-a916-434d-abd4-571cec7d4b6a","Type":"ContainerDied","Data":"35f523ca250ac79ac4541561752eaf87ea314b3758bd92036bb084eef35aa318"} Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.473966 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-9w2wl_801ddf87-455e-4941-8637-4c2f5da49d41/openstack-network-exporter/0.log" Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.474007 4783 generic.go:334] "Generic (PLEG): container finished" podID="801ddf87-455e-4941-8637-4c2f5da49d41" containerID="25fabd726430aea0cdf79b31be6e26feb4aede29cf3f33ef9ba4f73371bf719a" exitCode=2 Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.475306 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-9w2wl" 
event={"ID":"801ddf87-455e-4941-8637-4c2f5da49d41","Type":"ContainerDied","Data":"25fabd726430aea0cdf79b31be6e26feb4aede29cf3f33ef9ba4f73371bf719a"} Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.513681 4783 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.513743 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data podName:164c5743-32f5-4347-9c9d-20d28f1f2dce nodeName:}" failed. No retries permitted until 2025-09-30 13:58:30.513729077 +0000 UTC m=+1410.445195384 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data") pod "rabbitmq-server-0" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce") : configmap "rabbitmq-config-data" not found Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.661288 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" containerName="ovsdbserver-sb" containerID="cri-o://fcc76b379c0fd4e6d39b37b60874fb17163db1dfdebc6baf9355f72a722f6cb8" gracePeriod=300 Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.661641 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="164c5743-32f5-4347-9c9d-20d28f1f2dce" containerName="rabbitmq" containerID="cri-o://fde77ef5d7a9cf8cd4dc2107f4da9a25122e2181ab7101e04370f5e2b4e9c998" gracePeriod=604800 Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.670276 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell12a43-account-delete-wc5dr"] Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.671478 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell12a43-account-delete-wc5dr" Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.722063 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwv9q\" (UniqueName: \"kubernetes.io/projected/634bd6a4-be67-43db-b032-7e083edce6eb-kube-api-access-zwv9q\") pod \"novacell12a43-account-delete-wc5dr\" (UID: \"634bd6a4-be67-43db-b032-7e083edce6eb\") " pod="openstack/novacell12a43-account-delete-wc5dr" Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.741109 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="b901a1db-0fb0-4d58-be99-fdfd812683e6" containerName="rabbitmq" containerID="cri-o://2c450bff4273d74a9d160832b6ebe969916f4c399367beb3955bf0e5e42fed9d" gracePeriod=604800 Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.765960 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-dp2b8"] Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.780005 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-dp2b8"] Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.804184 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell12a43-account-delete-wc5dr"] Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.804265 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7c9bc45547-5grb6"] Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.804506 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7c9bc45547-5grb6" podUID="aea997d7-7510-42b0-91f8-07592048868f" containerName="neutron-api" containerID="cri-o://082fb383f645276bbaa075b85be6d49c88105cfa13629a7bcfb3725d2695cb56" gracePeriod=30 Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.804659 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7c9bc45547-5grb6" podUID="aea997d7-7510-42b0-91f8-07592048868f" containerName="neutron-httpd" containerID="cri-o://a27d904ac13e2388585d777e5ccadfdbee717dc617233684f8f3e68da41b3116" gracePeriod=30 Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.823752 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwv9q\" (UniqueName: \"kubernetes.io/projected/634bd6a4-be67-43db-b032-7e083edce6eb-kube-api-access-zwv9q\") pod \"novacell12a43-account-delete-wc5dr\" (UID: \"634bd6a4-be67-43db-b032-7e083edce6eb\") " pod="openstack/novacell12a43-account-delete-wc5dr" Sep 30 13:58:29 crc kubenswrapper[4783]: I0930 13:58:29.853801 4783 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/placement-784897656b-2kp66" secret="" err="secret \"placement-placement-dockercfg-2tgtm\" not found" Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.932977 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 35f523ca250ac79ac4541561752eaf87ea314b3758bd92036bb084eef35aa318 is running failed: container process not found" containerID="35f523ca250ac79ac4541561752eaf87ea314b3758bd92036bb084eef35aa318" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.936173 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 35f523ca250ac79ac4541561752eaf87ea314b3758bd92036bb084eef35aa318 is running failed: container process not found" containerID="35f523ca250ac79ac4541561752eaf87ea314b3758bd92036bb084eef35aa318" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.936735 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 35f523ca250ac79ac4541561752eaf87ea314b3758bd92036bb084eef35aa318 is running failed: container process not found" containerID="35f523ca250ac79ac4541561752eaf87ea314b3758bd92036bb084eef35aa318" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.936807 4783 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 35f523ca250ac79ac4541561752eaf87ea314b3758bd92036bb084eef35aa318 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="3e0048e0-a916-434d-abd4-571cec7d4b6a" containerName="ovn-northd" Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.965171 4783 secret.go:188] Couldn't get secret openstack/placement-scripts: secret "placement-scripts" not found Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.965304 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-scripts podName:6b97c668-20f4-48a9-a8ef-f5878e6aa23f nodeName:}" failed. No retries permitted until 2025-09-30 13:58:30.46526057 +0000 UTC m=+1410.396726877 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-scripts") pod "placement-784897656b-2kp66" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f") : secret "placement-scripts" not found Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.965319 4783 secret.go:188] Couldn't get secret openstack/placement-config-data: secret "placement-config-data" not found Sep 30 13:58:29 crc kubenswrapper[4783]: E0930 13:58:29.965436 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-config-data podName:6b97c668-20f4-48a9-a8ef-f5878e6aa23f nodeName:}" failed. No retries permitted until 2025-09-30 13:58:30.465404464 +0000 UTC m=+1410.396870761 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-config-data") pod "placement-784897656b-2kp66" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f") : secret "placement-config-data" not found Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.001361 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwv9q\" (UniqueName: \"kubernetes.io/projected/634bd6a4-be67-43db-b032-7e083edce6eb-kube-api-access-zwv9q\") pod \"novacell12a43-account-delete-wc5dr\" (UID: \"634bd6a4-be67-43db-b032-7e083edce6eb\") " pod="openstack/novacell12a43-account-delete-wc5dr" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.030086 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.030485 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="f7ef9466-e9f5-467e-9b43-2b7952e5b479" containerName="openstack-network-exporter" containerID="cri-o://77f234927c319795b9d92d2d040555fe9d069b79314bf58ace80d9e625297b71" gracePeriod=300 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.052835 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-j5vlb"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.062501 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-j5vlb"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.096742 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-784897656b-2kp66"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.106124 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell12a43-account-delete-wc5dr" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.132927 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutronac9c-account-delete-9kkgx"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.141039 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.141294 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="8fece54c-da0b-4cc0-b20d-b442b2fa73ed" containerName="cinder-api-log" containerID="cri-o://117dc1758063264abe9e841b11bdaac117d677993965b0ae1f6a1d411ffb2d13" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.141652 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="8fece54c-da0b-4cc0-b20d-b442b2fa73ed" containerName="cinder-api" containerID="cri-o://6deab5bf48649d7f6437dbb5f0e0ebd19ad06c0737cc1e0e97eeb7f38e12f735" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.178012 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapi1d76-account-delete-ncj8j"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.179325 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapi1d76-account-delete-ncj8j" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.206752 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cc449b9dc-br2xm"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.207549 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" podUID="284aafcd-4081-400f-a1c3-9992b3557fc1" containerName="dnsmasq-dns" containerID="cri-o://e102327404b7740d70116bbb62e2fff0bd91616eda2f2ce2560b87e112578b14" gracePeriod=10 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.240830 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.241311 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-server" containerID="cri-o://19f8581fd57609c6eca4ec015f369dde264c61b10ce59f14103fa1cc03844e73" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.241694 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="swift-recon-cron" containerID="cri-o://6891d76e2dc7453b6d905b53c783801dbea028a59018a446002e2af529d42a44" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.241734 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="rsync" containerID="cri-o://0d70c68a7c4fc63e37a1cd88f352dcc6ea4b65b3ee61fb6b9e535bf1688edd50" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.241775 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-expirer" containerID="cri-o://a5d2dcabd6bb3cd5f6248c47300f6f1b1f5ab3e6ca65c46a1385315a9950b46a" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.241807 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-updater" containerID="cri-o://e63439a8f0b25c832bdb5e04264df59bbf40a59ee781f02104bc7c90f0387d0f" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.241837 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-auditor" containerID="cri-o://65784350e77591c1ca799cd313cc75e676df485f81c5767c0ec61775c2feddef" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.241865 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-replicator" containerID="cri-o://f059bd6d5a4af4ef530539f9bc6ad12759d52d860d66ae9359e0c13a0faf1590" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.241897 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-server" containerID="cri-o://9d2fb4518ac235b269595c179e2eddcd2176f75944af31f8741a4f3a3772afd8" gracePeriod=30 Sep 30 13:58:30 crc 
kubenswrapper[4783]: I0930 13:58:30.241926 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-updater" containerID="cri-o://252f0904e64a3d0faf4018536bd7548f2c58c560fdd89b8a833d73b47bb1648c" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.241955 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-auditor" containerID="cri-o://84c1b4b25db238e614e22d807ca489645bb6aae387ddda2ee411cb9193dadcc3" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.241983 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-replicator" containerID="cri-o://b68136dd9fc59706f3378836d528857df1eec12a03161416d9087d37a2d7d285" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.242012 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-server" containerID="cri-o://5e62b2afddb1cd79f42408968b4363c8781a372ab3e53833b770416cab3087b7" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.242039 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-reaper" containerID="cri-o://bd7cfdf6e1722a5178a727be64336e94d492136884d482d78c9458aafb01c3e3" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.242071 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-auditor" containerID="cri-o://8aec62a44e6d0e2bf5d9c89e16252de35b71ae052ea1691ca721df50cb2cd898" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.242100 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-replicator" containerID="cri-o://e78afbdd94a9616ec2021e98bb1be4fbc47a48f38b3c103a4fcefb64434fd5b1" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.253447 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="f7ef9466-e9f5-467e-9b43-2b7952e5b479" containerName="ovsdbserver-nb" containerID="cri-o://723b0965694bde9a854686ff0198c23d4d50f4ee3872b0d79cd2c31ef8e522d8" gracePeriod=300 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.262876 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.263145 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="b1dc1d2a-552d-4400-9d1b-12a3a051c432" containerName="cinder-scheduler" containerID="cri-o://024211a566129e37ebcc5af066c31416fc39da5253df84444b600a23f5f7a81e" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.263602 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="b1dc1d2a-552d-4400-9d1b-12a3a051c432" containerName="probe" 
containerID="cri-o://d100cc0e53e2504d5d93fa913ed337f4d3bdd4130801738388f1ae3625b57276" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: E0930 13:58:30.281362 4783 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Sep 30 13:58:30 crc kubenswrapper[4783]: E0930 13:58:30.281731 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data podName:b901a1db-0fb0-4d58-be99-fdfd812683e6 nodeName:}" failed. No retries permitted until 2025-09-30 13:58:32.281709879 +0000 UTC m=+1412.213176186 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data") pod "rabbitmq-cell1-server-0" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6") : configmap "rabbitmq-cell1-config-data" not found Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.293641 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi1d76-account-delete-ncj8j"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.326810 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.327826 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="2419c631-f6ff-431e-bb3b-2c3285eda678" containerName="glance-httpd" containerID="cri-o://069ecdff26e68e0d96f961dea1f277e66bd9d7eb17de82605d0a89f72c085c42" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.327430 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="2419c631-f6ff-431e-bb3b-2c3285eda678" containerName="glance-log" containerID="cri-o://40a405fda44ba184b836f4f22105f610a1f2f4078bd7ae78c09b94c9367d95c2" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.349186 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-z7899"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.370148 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.376502 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="050b08a6-64b8-4237-acfc-37711efa8361" containerName="glance-httpd" containerID="cri-o://1bccf10c6c93de0d51e10e64262519a909d10f198ba045f898de5f0df6447a1d" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.377582 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="050b08a6-64b8-4237-acfc-37711efa8361" containerName="glance-log" containerID="cri-o://9c00cf71bb8b3efbefb119c4700536e994f2ca1128db0c5280a9b57683983551" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.385051 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqt6r\" (UniqueName: \"kubernetes.io/projected/36fb1123-03da-4b8c-b9b1-39caa412db70-kube-api-access-xqt6r\") pod \"novaapi1d76-account-delete-ncj8j\" (UID: \"36fb1123-03da-4b8c-b9b1-39caa412db70\") " pod="openstack/novaapi1d76-account-delete-ncj8j" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.389918 4783 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-z7899"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.420100 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-5hxlv"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.439698 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-5hxlv"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.464115 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-g9gmd"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.488149 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqt6r\" (UniqueName: \"kubernetes.io/projected/36fb1123-03da-4b8c-b9b1-39caa412db70-kube-api-access-xqt6r\") pod \"novaapi1d76-account-delete-ncj8j\" (UID: \"36fb1123-03da-4b8c-b9b1-39caa412db70\") " pod="openstack/novaapi1d76-account-delete-ncj8j" Sep 30 13:58:30 crc kubenswrapper[4783]: E0930 13:58:30.488502 4783 secret.go:188] Couldn't get secret openstack/placement-scripts: secret "placement-scripts" not found Sep 30 13:58:30 crc kubenswrapper[4783]: E0930 13:58:30.488588 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-scripts podName:6b97c668-20f4-48a9-a8ef-f5878e6aa23f nodeName:}" failed. No retries permitted until 2025-09-30 13:58:31.488563796 +0000 UTC m=+1411.420030173 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-scripts") pod "placement-784897656b-2kp66" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f") : secret "placement-scripts" not found Sep 30 13:58:30 crc kubenswrapper[4783]: E0930 13:58:30.488739 4783 secret.go:188] Couldn't get secret openstack/placement-config-data: secret "placement-config-data" not found Sep 30 13:58:30 crc kubenswrapper[4783]: E0930 13:58:30.488795 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-config-data podName:6b97c668-20f4-48a9-a8ef-f5878e6aa23f nodeName:}" failed. No retries permitted until 2025-09-30 13:58:31.488777362 +0000 UTC m=+1411.420243769 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-config-data") pod "placement-784897656b-2kp66" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f") : secret "placement-config-data" not found Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.490959 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-g9gmd"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.517680 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronac9c-account-delete-9kkgx" event={"ID":"08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf","Type":"ContainerStarted","Data":"dfe3dbd2d94f43f1b092a66251df301a932e8457f189a362a4eb7b300e79e3ff"} Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.527956 4783 generic.go:334] "Generic (PLEG): container finished" podID="aea997d7-7510-42b0-91f8-07592048868f" containerID="a27d904ac13e2388585d777e5ccadfdbee717dc617233684f8f3e68da41b3116" exitCode=0 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.528069 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c9bc45547-5grb6" event={"ID":"aea997d7-7510-42b0-91f8-07592048868f","Type":"ContainerDied","Data":"a27d904ac13e2388585d777e5ccadfdbee717dc617233684f8f3e68da41b3116"} Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.547733 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqt6r\" (UniqueName: \"kubernetes.io/projected/36fb1123-03da-4b8c-b9b1-39caa412db70-kube-api-access-xqt6r\") pod \"novaapi1d76-account-delete-ncj8j\" (UID: \"36fb1123-03da-4b8c-b9b1-39caa412db70\") " pod="openstack/novaapi1d76-account-delete-ncj8j" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.568371 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-ac9c-account-create-lkwhv"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.574643 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f7ef9466-e9f5-467e-9b43-2b7952e5b479/ovsdbserver-nb/0.log" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.574701 4783 generic.go:334] "Generic (PLEG): container finished" podID="f7ef9466-e9f5-467e-9b43-2b7952e5b479" containerID="77f234927c319795b9d92d2d040555fe9d069b79314bf58ace80d9e625297b71" exitCode=2 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.574791 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f7ef9466-e9f5-467e-9b43-2b7952e5b479","Type":"ContainerDied","Data":"77f234927c319795b9d92d2d040555fe9d069b79314bf58ace80d9e625297b71"} Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.582737 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi1d76-account-delete-ncj8j" Sep 30 13:58:30 crc kubenswrapper[4783]: E0930 13:58:30.591412 4783 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 30 13:58:30 crc kubenswrapper[4783]: E0930 13:58:30.591476 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data podName:164c5743-32f5-4347-9c9d-20d28f1f2dce nodeName:}" failed. No retries permitted until 2025-09-30 13:58:32.591461982 +0000 UTC m=+1412.522928289 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data") pod "rabbitmq-server-0" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce") : configmap "rabbitmq-config-data" not found Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.594079 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-ac9c-account-create-lkwhv"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.616888 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronac9c-account-delete-9kkgx"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.634166 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-bs8sx"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.662677 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-bs8sx"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.678026 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c707a7c5-54fa-4430-8bbe-ac8eebbb0a59/ovsdbserver-sb/0.log" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.678086 4783 generic.go:334] "Generic (PLEG): container finished" podID="c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" containerID="f963b067d2255b5af1022d6dd485fe3e8d7ab5715354c349f168effd5f9bdcd4" exitCode=2 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.678109 4783 generic.go:334] "Generic (PLEG): container finished" podID="c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" containerID="fcc76b379c0fd4e6d39b37b60874fb17163db1dfdebc6baf9355f72a722f6cb8" exitCode=143 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.678193 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59","Type":"ContainerDied","Data":"f963b067d2255b5af1022d6dd485fe3e8d7ab5715354c349f168effd5f9bdcd4"} Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.678247 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59","Type":"ContainerDied","Data":"fcc76b379c0fd4e6d39b37b60874fb17163db1dfdebc6baf9355f72a722f6cb8"} Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.703951 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-d5qms"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.718291 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-d5qms"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.739567 4783 generic.go:334] "Generic (PLEG): container finished" podID="8fece54c-da0b-4cc0-b20d-b442b2fa73ed" containerID="117dc1758063264abe9e841b11bdaac117d677993965b0ae1f6a1d411ffb2d13" exitCode=143 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.739844 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-784897656b-2kp66" podUID="6b97c668-20f4-48a9-a8ef-f5878e6aa23f" containerName="placement-log" containerID="cri-o://8d42af40fdf6ba3fbff5a708098b5ffb7371fc3b476c8c5bc057e0dc5815233d" gracePeriod=30 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.741704 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-784897656b-2kp66" podUID="6b97c668-20f4-48a9-a8ef-f5878e6aa23f" containerName="placement-api" containerID="cri-o://8df62f6d21d21c10de4af33338f5e6aaa9331745b50e08b8d0f63b05fcdf0a2f" gracePeriod=30 Sep 30 13:58:30 crc 
kubenswrapper[4783]: I0930 13:58:30.741864 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-92eb-account-create-pr6m9"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.741892 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8fece54c-da0b-4cc0-b20d-b442b2fa73ed","Type":"ContainerDied","Data":"117dc1758063264abe9e841b11bdaac117d677993965b0ae1f6a1d411ffb2d13"} Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.761281 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-230c-account-create-9rv9g"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.776877 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-e8d3-account-create-whrv6"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.789576 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder230c-account-delete-xk8kk"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.807734 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-92eb-account-create-pr6m9"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.808009 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-e8d3-account-create-whrv6"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.833838 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-230c-account-create-9rv9g"] Sep 30 13:58:30 crc kubenswrapper[4783]: E0930 13:58:30.835485 4783 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Sep 30 13:58:30 crc kubenswrapper[4783]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Sep 30 13:58:30 crc kubenswrapper[4783]: + source /usr/local/bin/container-scripts/functions Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNBridge=br-int Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNRemote=tcp:localhost:6642 Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNEncapType=geneve Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNAvailabilityZones= Sep 30 13:58:30 crc kubenswrapper[4783]: ++ EnableChassisAsGateway=true Sep 30 13:58:30 crc kubenswrapper[4783]: ++ PhysicalNetworks= Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNHostName= Sep 30 13:58:30 crc kubenswrapper[4783]: ++ DB_FILE=/etc/openvswitch/conf.db Sep 30 13:58:30 crc kubenswrapper[4783]: ++ ovs_dir=/var/lib/openvswitch Sep 30 13:58:30 crc kubenswrapper[4783]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Sep 30 13:58:30 crc kubenswrapper[4783]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Sep 30 13:58:30 crc kubenswrapper[4783]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 30 13:58:30 crc kubenswrapper[4783]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 13:58:30 crc kubenswrapper[4783]: + sleep 0.5 Sep 30 13:58:30 crc kubenswrapper[4783]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 13:58:30 crc kubenswrapper[4783]: + sleep 0.5 Sep 30 13:58:30 crc kubenswrapper[4783]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 13:58:30 crc kubenswrapper[4783]: + cleanup_ovsdb_server_semaphore Sep 30 13:58:30 crc kubenswrapper[4783]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 30 13:58:30 crc kubenswrapper[4783]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Sep 30 13:58:30 crc kubenswrapper[4783]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-ttc29" message=< Sep 30 13:58:30 crc kubenswrapper[4783]: Exiting ovsdb-server (5) ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Sep 30 13:58:30 crc kubenswrapper[4783]: + source /usr/local/bin/container-scripts/functions Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNBridge=br-int Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNRemote=tcp:localhost:6642 Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNEncapType=geneve Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNAvailabilityZones= Sep 30 13:58:30 crc kubenswrapper[4783]: ++ EnableChassisAsGateway=true Sep 30 13:58:30 crc kubenswrapper[4783]: ++ PhysicalNetworks= Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNHostName= Sep 30 13:58:30 crc kubenswrapper[4783]: ++ DB_FILE=/etc/openvswitch/conf.db Sep 30 13:58:30 crc kubenswrapper[4783]: ++ ovs_dir=/var/lib/openvswitch Sep 30 13:58:30 crc kubenswrapper[4783]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Sep 30 13:58:30 crc kubenswrapper[4783]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Sep 30 13:58:30 crc kubenswrapper[4783]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 30 13:58:30 crc kubenswrapper[4783]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 13:58:30 crc kubenswrapper[4783]: + sleep 0.5 Sep 30 13:58:30 crc kubenswrapper[4783]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 13:58:30 crc kubenswrapper[4783]: + sleep 0.5 Sep 30 13:58:30 crc kubenswrapper[4783]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 13:58:30 crc kubenswrapper[4783]: + cleanup_ovsdb_server_semaphore Sep 30 13:58:30 crc kubenswrapper[4783]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 30 13:58:30 crc kubenswrapper[4783]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Sep 30 13:58:30 crc kubenswrapper[4783]: > Sep 30 13:58:30 crc kubenswrapper[4783]: E0930 13:58:30.841455 4783 kuberuntime_container.go:691] "PreStop hook failed" err=< Sep 30 13:58:30 crc kubenswrapper[4783]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Sep 30 13:58:30 crc kubenswrapper[4783]: + source /usr/local/bin/container-scripts/functions Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNBridge=br-int Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNRemote=tcp:localhost:6642 Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNEncapType=geneve Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNAvailabilityZones= Sep 30 13:58:30 crc kubenswrapper[4783]: ++ EnableChassisAsGateway=true Sep 30 13:58:30 crc kubenswrapper[4783]: ++ PhysicalNetworks= Sep 30 13:58:30 crc kubenswrapper[4783]: ++ OVNHostName= Sep 30 13:58:30 crc kubenswrapper[4783]: ++ DB_FILE=/etc/openvswitch/conf.db Sep 30 13:58:30 crc kubenswrapper[4783]: ++ ovs_dir=/var/lib/openvswitch Sep 30 13:58:30 crc kubenswrapper[4783]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Sep 30 13:58:30 crc kubenswrapper[4783]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Sep 30 13:58:30 crc kubenswrapper[4783]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 30 13:58:30 crc kubenswrapper[4783]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 13:58:30 crc kubenswrapper[4783]: + sleep 0.5 Sep 30 13:58:30 crc kubenswrapper[4783]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 13:58:30 crc kubenswrapper[4783]: + sleep 0.5 Sep 30 13:58:30 crc kubenswrapper[4783]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 30 13:58:30 crc kubenswrapper[4783]: + cleanup_ovsdb_server_semaphore Sep 30 13:58:30 crc kubenswrapper[4783]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 30 13:58:30 crc kubenswrapper[4783]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Sep 30 13:58:30 crc kubenswrapper[4783]: > pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovsdb-server" containerID="cri-o://ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.842376 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovsdb-server" containerID="cri-o://ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" gracePeriod=28 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.846098 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovs-vswitchd" containerID="cri-o://dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" gracePeriod=28 Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.877515 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="011eca05-b58b-4412-b0d8-3700bb26099b" path="/var/lib/kubelet/pods/011eca05-b58b-4412-b0d8-3700bb26099b/volumes" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.878973 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ac8973-fc47-4f89-86d2-b973ef33a21d" path="/var/lib/kubelet/pods/01ac8973-fc47-4f89-86d2-b973ef33a21d/volumes" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.882275 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04087483-d2dd-4f70-99f1-592a46394263" path="/var/lib/kubelet/pods/04087483-d2dd-4f70-99f1-592a46394263/volumes" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.883360 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13707881-f4b3-4fea-b926-3724eb156688" path="/var/lib/kubelet/pods/13707881-f4b3-4fea-b926-3724eb156688/volumes" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.884492 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2976d001-3d08-4721-85db-95c0a0de28b8" path="/var/lib/kubelet/pods/2976d001-3d08-4721-85db-95c0a0de28b8/volumes" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.889307 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50128352-8027-4b7a-af43-18310b14ca16" path="/var/lib/kubelet/pods/50128352-8027-4b7a-af43-18310b14ca16/volumes" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.901162 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e80664f-1f1f-4183-a655-ca8f7e8e4af8" path="/var/lib/kubelet/pods/5e80664f-1f1f-4183-a655-ca8f7e8e4af8/volumes" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.908855 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fe5df32-3b3d-486a-abe4-0e04c91c54c6" path="/var/lib/kubelet/pods/8fe5df32-3b3d-486a-abe4-0e04c91c54c6/volumes" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.910254 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99edfb42-ed13-471e-8e93-62ccafc5b190" path="/var/lib/kubelet/pods/99edfb42-ed13-471e-8e93-62ccafc5b190/volumes" Sep 30 
13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.910839 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2c0f88a-3106-4f9e-b41d-d9fa4542a24f" path="/var/lib/kubelet/pods/c2c0f88a-3106-4f9e-b41d-d9fa4542a24f/volumes" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.911942 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5499617-8645-4b0b-9b0e-5dbe617afc92" path="/var/lib/kubelet/pods/c5499617-8645-4b0b-9b0e-5dbe617afc92/volumes" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.912551 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672" path="/var/lib/kubelet/pods/c8ef4c24-7507-41c1-aa3c-4e2a9b8f6672/volumes" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.915871 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4af7ed9-fafa-46b2-87df-f482bed30a5a" path="/var/lib/kubelet/pods/d4af7ed9-fafa-46b2-87df-f482bed30a5a/volumes" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.916574 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9ba6a72-2550-4331-bdd0-12b192b5bc8a" path="/var/lib/kubelet/pods/d9ba6a72-2550-4331-bdd0-12b192b5bc8a/volumes" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.917355 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7b8982d-c79a-470e-a0b5-1a8c2e299993" path="/var/lib/kubelet/pods/f7b8982d-c79a-470e-a0b5-1a8c2e299993/volumes" Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.917980 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-slhq8"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.918095 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-sctg2"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.947859 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-slhq8"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.947938 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-sctg2"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.947951 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement92eb-account-delete-tcr2d"] Sep 30 13:58:30 crc kubenswrapper[4783]: I0930 13:58:30.947988 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8272-account-create-2x85s"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.020806 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8272-account-create-2x85s"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.044409 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-9d8545ff7-pqd2t"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.045179 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-9d8545ff7-pqd2t" podUID="ccf790ec-b4f7-4734-92a0-929ed51c08ec" containerName="proxy-httpd" containerID="cri-o://4a6acb631ade5965dc80487c2617529e44bce90e5a8b1f824ba3aef899ae630c" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.045679 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-9d8545ff7-pqd2t" podUID="ccf790ec-b4f7-4734-92a0-929ed51c08ec" containerName="proxy-server" containerID="cri-o://bc07330040a034c8f60b202b084c69f15e451000d8ccb782c009041fba8c604d" gracePeriod=30 Sep 30 13:58:31 crc 
kubenswrapper[4783]: I0930 13:58:31.052854 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.084912 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-95bcf9466-5g2ds"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.085447 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" podUID="3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" containerName="barbican-keystone-listener-log" containerID="cri-o://7fbe807e4a69a2c6466e2d4a52f57888f0a5143866c142a93efd093f14764a7a" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.085840 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" podUID="3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" containerName="barbican-keystone-listener" containerID="cri-o://7cff1c12b1b0b2c4dcf219452ffc056adaceae66605a198cc4bdd76b90770222" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.092692 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-fb9cffd59-bwk45"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.093400 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-fb9cffd59-bwk45" podUID="2a19af6c-8b2e-41f3-ac68-012bd49e514b" containerName="barbican-worker" containerID="cri-o://433d915c50b0d185b319c45fc33233e3e3c0ab13a0ec0a6aef298225900bcb06" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.093361 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-fb9cffd59-bwk45" podUID="2a19af6c-8b2e-41f3-ac68-012bd49e514b" containerName="barbican-worker-log" containerID="cri-o://60da69babbe7c125d7aac96c30abeffc6a81804a11c64e08329bc23563951526" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.103481 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6d956c456d-krq7k"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.103740 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6d956c456d-krq7k" podUID="1f741556-230b-409c-b9bd-d0dc1abbcd77" containerName="barbican-api-log" containerID="cri-o://083c5d84dc5f5d23c7a9e1a3414dc17b7d8a4bbc02117dde2f658c66f9ee7b94" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.104129 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6d956c456d-krq7k" podUID="1f741556-230b-409c-b9bd-d0dc1abbcd77" containerName="barbican-api" containerID="cri-o://9df990f3d0bfc3752e7528f602f6eb747222541c98468b6d5a79a895a7dfc8ca" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.119820 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.120068 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="de5783b8-dd5d-4570-ada8-5b1775a75813" containerName="nova-metadata-log" containerID="cri-o://95317c362e4cc6f8f466aada85a08023d39d7a1431f188a48a81981454a121fc" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.120517 4783 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/nova-metadata-0" podUID="de5783b8-dd5d-4570-ada8-5b1775a75813" containerName="nova-metadata-metadata" containerID="cri-o://66c8744ce1f77267319fe7155ecbf63253ea5719037e333e1d1c36b06e0ed433" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.130019 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.130275 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="990cfb5a-6508-4344-9df7-391f55a70bd8" containerName="nova-api-log" containerID="cri-o://23388cedb834ec06780dc2a580b02b9fda46fcb79fc7f70e7cc73f244c70f5ed" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.130583 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="990cfb5a-6508-4344-9df7-391f55a70bd8" containerName="nova-api-api" containerID="cri-o://a2bad418a29881b7122fefd4cd227ec191c29a702e6757d67cb0615fa138b8f9" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.151079 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-qqwkt"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.197772 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-qqwkt"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.219091 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-rtbsj"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.237263 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-rtbsj"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.245608 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-0854-account-create-4rpvb"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.254257 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-0854-account-create-4rpvb"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.254413 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_3e0048e0-a916-434d-abd4-571cec7d4b6a/ovn-northd/0.log" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.254482 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.256537 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-ovn-northd-tls-certs\") pod \"3e0048e0-a916-434d-abd4-571cec7d4b6a\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.256572 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmgcd\" (UniqueName: \"kubernetes.io/projected/3e0048e0-a916-434d-abd4-571cec7d4b6a-kube-api-access-nmgcd\") pod \"3e0048e0-a916-434d-abd4-571cec7d4b6a\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.256638 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e0048e0-a916-434d-abd4-571cec7d4b6a-config\") pod \"3e0048e0-a916-434d-abd4-571cec7d4b6a\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.256777 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-combined-ca-bundle\") pod \"3e0048e0-a916-434d-abd4-571cec7d4b6a\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.256813 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e0048e0-a916-434d-abd4-571cec7d4b6a-scripts\") pod \"3e0048e0-a916-434d-abd4-571cec7d4b6a\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.256845 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-metrics-certs-tls-certs\") pod \"3e0048e0-a916-434d-abd4-571cec7d4b6a\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.257746 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-9w2wl_801ddf87-455e-4941-8637-4c2f5da49d41/openstack-network-exporter/0.log" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.257794 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.258430 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e0048e0-a916-434d-abd4-571cec7d4b6a-config" (OuterVolumeSpecName: "config") pod "3e0048e0-a916-434d-abd4-571cec7d4b6a" (UID: "3e0048e0-a916-434d-abd4-571cec7d4b6a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.264784 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-1d76-account-create-qvfvc"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.267173 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e0048e0-a916-434d-abd4-571cec7d4b6a-scripts" (OuterVolumeSpecName: "scripts") pod "3e0048e0-a916-434d-abd4-571cec7d4b6a" (UID: "3e0048e0-a916-434d-abd4-571cec7d4b6a"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.269523 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi1d76-account-delete-ncj8j"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.272407 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-th6r6" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.283770 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-1d76-account-create-qvfvc"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.291055 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-2a43-account-create-hjmwp"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.294968 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e0048e0-a916-434d-abd4-571cec7d4b6a-kube-api-access-nmgcd" (OuterVolumeSpecName: "kube-api-access-nmgcd") pod "3e0048e0-a916-434d-abd4-571cec7d4b6a" (UID: "3e0048e0-a916-434d-abd4-571cec7d4b6a"). InnerVolumeSpecName "kube-api-access-nmgcd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.305727 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-2a43-account-create-hjmwp"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.322804 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-r5r75"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.337004 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell12a43-account-delete-wc5dr"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.346756 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.347004 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="d61c8e26-064d-430a-8bb8-4e3c5e192d3a" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://f58695968a9a08174e809fb09ab91645d04b3c0b2d605ec2f45eabb0375db90f" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.356633 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-r5r75"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.360042 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3e0048e0-a916-434d-abd4-571cec7d4b6a-ovn-rundir\") pod \"3e0048e0-a916-434d-abd4-571cec7d4b6a\" (UID: \"3e0048e0-a916-434d-abd4-571cec7d4b6a\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.363269 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-ovn-controller-tls-certs\") pod \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.363374 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zb7mh\" (UniqueName: \"kubernetes.io/projected/801ddf87-455e-4941-8637-4c2f5da49d41-kube-api-access-zb7mh\") pod \"801ddf87-455e-4941-8637-4c2f5da49d41\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " Sep 30 13:58:31 crc 
kubenswrapper[4783]: I0930 13:58:31.363505 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-run\") pod \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.364492 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-combined-ca-bundle\") pod \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.364631 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-scripts\") pod \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.364803 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgs4w\" (UniqueName: \"kubernetes.io/projected/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-kube-api-access-wgs4w\") pod \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.364933 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801ddf87-455e-4941-8637-4c2f5da49d41-combined-ca-bundle\") pod \"801ddf87-455e-4941-8637-4c2f5da49d41\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.365047 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-log-ovn\") pod \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.365313 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/801ddf87-455e-4941-8637-4c2f5da49d41-config\") pod \"801ddf87-455e-4941-8637-4c2f5da49d41\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.365503 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-run-ovn\") pod \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\" (UID: \"8bc852c2-c59b-4b84-bbfc-c8b62354c66d\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.366161 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/801ddf87-455e-4941-8637-4c2f5da49d41-metrics-certs-tls-certs\") pod \"801ddf87-455e-4941-8637-4c2f5da49d41\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.366324 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/801ddf87-455e-4941-8637-4c2f5da49d41-ovn-rundir\") pod \"801ddf87-455e-4941-8637-4c2f5da49d41\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 
13:58:31.367742 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e0048e0-a916-434d-abd4-571cec7d4b6a-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.368307 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmgcd\" (UniqueName: \"kubernetes.io/projected/3e0048e0-a916-434d-abd4-571cec7d4b6a-kube-api-access-nmgcd\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.368410 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e0048e0-a916-434d-abd4-571cec7d4b6a-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.368513 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder230c-account-delete-xk8kk"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.365660 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-run" (OuterVolumeSpecName: "var-run") pod "8bc852c2-c59b-4b84-bbfc-c8b62354c66d" (UID: "8bc852c2-c59b-4b84-bbfc-c8b62354c66d"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.368329 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/801ddf87-455e-4941-8637-4c2f5da49d41-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "801ddf87-455e-4941-8637-4c2f5da49d41" (UID: "801ddf87-455e-4941-8637-4c2f5da49d41"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.368366 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "8bc852c2-c59b-4b84-bbfc-c8b62354c66d" (UID: "8bc852c2-c59b-4b84-bbfc-c8b62354c66d"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.368939 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/801ddf87-455e-4941-8637-4c2f5da49d41-config" (OuterVolumeSpecName: "config") pod "801ddf87-455e-4941-8637-4c2f5da49d41" (UID: "801ddf87-455e-4941-8637-4c2f5da49d41"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.368969 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "8bc852c2-c59b-4b84-bbfc-c8b62354c66d" (UID: "8bc852c2-c59b-4b84-bbfc-c8b62354c66d"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.369130 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-scripts" (OuterVolumeSpecName: "scripts") pod "8bc852c2-c59b-4b84-bbfc-c8b62354c66d" (UID: "8bc852c2-c59b-4b84-bbfc-c8b62354c66d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.369642 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e0048e0-a916-434d-abd4-571cec7d4b6a-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "3e0048e0-a916-434d-abd4-571cec7d4b6a" (UID: "3e0048e0-a916-434d-abd4-571cec7d4b6a"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.371838 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-kube-api-access-wgs4w" (OuterVolumeSpecName: "kube-api-access-wgs4w") pod "8bc852c2-c59b-4b84-bbfc-c8b62354c66d" (UID: "8bc852c2-c59b-4b84-bbfc-c8b62354c66d"). InnerVolumeSpecName "kube-api-access-wgs4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.406010 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mlmst"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.406495 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/801ddf87-455e-4941-8637-4c2f5da49d41-kube-api-access-zb7mh" (OuterVolumeSpecName: "kube-api-access-zb7mh") pod "801ddf87-455e-4941-8637-4c2f5da49d41" (UID: "801ddf87-455e-4941-8637-4c2f5da49d41"). InnerVolumeSpecName "kube-api-access-zb7mh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.415380 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e0048e0-a916-434d-abd4-571cec7d4b6a" (UID: "3e0048e0-a916-434d-abd4-571cec7d4b6a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.438748 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/801ddf87-455e-4941-8637-4c2f5da49d41-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "801ddf87-455e-4941-8637-4c2f5da49d41" (UID: "801ddf87-455e-4941-8637-4c2f5da49d41"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.450874 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="f1989fc2-d0ba-49ce-a488-589eaaaecb58" containerName="galera" containerID="cri-o://8b44a3225e80529e92402310c15ae3201738ad99fd4f25914d73c93731e3eb14" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.476027 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/801ddf87-455e-4941-8637-4c2f5da49d41-ovs-rundir\") pod \"801ddf87-455e-4941-8637-4c2f5da49d41\" (UID: \"801ddf87-455e-4941-8637-4c2f5da49d41\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.476777 4783 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.476792 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.476800 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgs4w\" (UniqueName: \"kubernetes.io/projected/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-kube-api-access-wgs4w\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.476809 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801ddf87-455e-4941-8637-4c2f5da49d41-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.476818 4783 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.476828 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/801ddf87-455e-4941-8637-4c2f5da49d41-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.476864 4783 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.476874 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.476881 4783 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/801ddf87-455e-4941-8637-4c2f5da49d41-ovn-rundir\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.476889 4783 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3e0048e0-a916-434d-abd4-571cec7d4b6a-ovn-rundir\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.476898 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zb7mh\" (UniqueName: 
\"kubernetes.io/projected/801ddf87-455e-4941-8637-4c2f5da49d41-kube-api-access-zb7mh\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.476968 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/801ddf87-455e-4941-8637-4c2f5da49d41-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "801ddf87-455e-4941-8637-4c2f5da49d41" (UID: "801ddf87-455e-4941-8637-4c2f5da49d41"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.493041 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mlmst"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.527015 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.527405 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="563b20bf-7587-442c-86c5-1cbb179a2bf6" containerName="nova-cell1-conductor-conductor" containerID="cri-o://263f4dbca5cc00d91bb54ca88f80bb3b726ad47d96c37c7482687ea14abe3852" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.529438 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "3e0048e0-a916-434d-abd4-571cec7d4b6a" (UID: "3e0048e0-a916-434d-abd4-571cec7d4b6a"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.538571 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-67qfh"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.545813 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.546043 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="8f157470-67d0-452c-9959-a452400c02d7" containerName="nova-cell0-conductor-conductor" containerID="cri-o://48d38b5c765562ca9d2dc56e100867eff5ee4e00fb09908ca53b0024ef270980" gracePeriod=30 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.558074 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-67qfh"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.570621 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "3e0048e0-a916-434d-abd4-571cec7d4b6a" (UID: "3e0048e0-a916-434d-abd4-571cec7d4b6a"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.573932 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.579931 4783 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.579950 4783 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/801ddf87-455e-4941-8637-4c2f5da49d41-ovs-rundir\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.579959 4783 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e0048e0-a916-434d-abd4-571cec7d4b6a-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: E0930 13:58:31.580038 4783 secret.go:188] Couldn't get secret openstack/placement-scripts: secret "placement-scripts" not found Sep 30 13:58:31 crc kubenswrapper[4783]: E0930 13:58:31.580081 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-scripts podName:6b97c668-20f4-48a9-a8ef-f5878e6aa23f nodeName:}" failed. No retries permitted until 2025-09-30 13:58:33.580068334 +0000 UTC m=+1413.511534641 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-scripts") pod "placement-784897656b-2kp66" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f") : secret "placement-scripts" not found Sep 30 13:58:31 crc kubenswrapper[4783]: E0930 13:58:31.580507 4783 secret.go:188] Couldn't get secret openstack/placement-config-data: secret "placement-config-data" not found Sep 30 13:58:31 crc kubenswrapper[4783]: E0930 13:58:31.580573 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-config-data podName:6b97c668-20f4-48a9-a8ef-f5878e6aa23f nodeName:}" failed. No retries permitted until 2025-09-30 13:58:33.58055654 +0000 UTC m=+1413.512022847 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-config-data") pod "placement-784897656b-2kp66" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f") : secret "placement-config-data" not found Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.590778 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement92eb-account-delete-tcr2d"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.631465 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8bc852c2-c59b-4b84-bbfc-c8b62354c66d" (UID: "8bc852c2-c59b-4b84-bbfc-c8b62354c66d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.683122 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-dns-swift-storage-0\") pod \"284aafcd-4081-400f-a1c3-9992b3557fc1\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.683251 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-config\") pod \"284aafcd-4081-400f-a1c3-9992b3557fc1\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.683322 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-dns-svc\") pod \"284aafcd-4081-400f-a1c3-9992b3557fc1\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.683491 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2m8z\" (UniqueName: \"kubernetes.io/projected/284aafcd-4081-400f-a1c3-9992b3557fc1-kube-api-access-f2m8z\") pod \"284aafcd-4081-400f-a1c3-9992b3557fc1\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.683525 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-ovsdbserver-sb\") pod \"284aafcd-4081-400f-a1c3-9992b3557fc1\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.683565 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-ovsdbserver-nb\") pod \"284aafcd-4081-400f-a1c3-9992b3557fc1\" (UID: \"284aafcd-4081-400f-a1c3-9992b3557fc1\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.684168 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.691421 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell12a43-account-delete-wc5dr"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.693727 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f7ef9466-e9f5-467e-9b43-2b7952e5b479/ovsdbserver-nb/0.log" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.693800 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.697320 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "8bc852c2-c59b-4b84-bbfc-c8b62354c66d" (UID: "8bc852c2-c59b-4b84-bbfc-c8b62354c66d"). InnerVolumeSpecName "ovn-controller-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.700996 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/801ddf87-455e-4941-8637-4c2f5da49d41-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "801ddf87-455e-4941-8637-4c2f5da49d41" (UID: "801ddf87-455e-4941-8637-4c2f5da49d41"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.704016 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/284aafcd-4081-400f-a1c3-9992b3557fc1-kube-api-access-f2m8z" (OuterVolumeSpecName: "kube-api-access-f2m8z") pod "284aafcd-4081-400f-a1c3-9992b3557fc1" (UID: "284aafcd-4081-400f-a1c3-9992b3557fc1"). InnerVolumeSpecName "kube-api-access-f2m8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.716755 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.764026 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c707a7c5-54fa-4430-8bbe-ac8eebbb0a59/ovsdbserver-sb/0.log" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.764127 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.789988 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f7ef9466-e9f5-467e-9b43-2b7952e5b479-scripts\") pod \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790129 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790159 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-metrics-certs-tls-certs\") pod \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790199 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-ovsdbserver-sb-tls-certs\") pod \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790263 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/05290e3e-89c9-4073-96b6-e97a289f4431-openstack-config-secret\") pod \"05290e3e-89c9-4073-96b6-e97a289f4431\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790298 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-scripts\") pod \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790402 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/05290e3e-89c9-4073-96b6-e97a289f4431-openstack-config\") pod \"05290e3e-89c9-4073-96b6-e97a289f4431\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790455 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vc26w\" (UniqueName: \"kubernetes.io/projected/f7ef9466-e9f5-467e-9b43-2b7952e5b479-kube-api-access-vc26w\") pod \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790480 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gw46f\" (UniqueName: \"kubernetes.io/projected/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-kube-api-access-gw46f\") pod \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790514 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05290e3e-89c9-4073-96b6-e97a289f4431-combined-ca-bundle\") pod \"05290e3e-89c9-4073-96b6-e97a289f4431\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790553 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f7ef9466-e9f5-467e-9b43-2b7952e5b479-ovsdb-rundir\") pod \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790574 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-config\") pod \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790605 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlmc5\" (UniqueName: \"kubernetes.io/projected/05290e3e-89c9-4073-96b6-e97a289f4431-kube-api-access-hlmc5\") pod \"05290e3e-89c9-4073-96b6-e97a289f4431\" (UID: \"05290e3e-89c9-4073-96b6-e97a289f4431\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790703 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-metrics-certs-tls-certs\") pod \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790750 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-ovsdbserver-nb-tls-certs\") pod \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790797 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-combined-ca-bundle\") pod \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790827 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-ovsdb-rundir\") pod \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790846 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790876 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7ef9466-e9f5-467e-9b43-2b7952e5b479-config\") pod \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\" (UID: \"f7ef9466-e9f5-467e-9b43-2b7952e5b479\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.790902 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-combined-ca-bundle\") pod \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\" (UID: \"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59\") " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.791506 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7ef9466-e9f5-467e-9b43-2b7952e5b479-scripts" (OuterVolumeSpecName: "scripts") pod "f7ef9466-e9f5-467e-9b43-2b7952e5b479" (UID: "f7ef9466-e9f5-467e-9b43-2b7952e5b479"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.792874 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f7ef9466-e9f5-467e-9b43-2b7952e5b479-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.792900 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2m8z\" (UniqueName: \"kubernetes.io/projected/284aafcd-4081-400f-a1c3-9992b3557fc1-kube-api-access-f2m8z\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.792918 4783 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/801ddf87-455e-4941-8637-4c2f5da49d41-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.792932 4783 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bc852c2-c59b-4b84-bbfc-c8b62354c66d-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.795017 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7ef9466-e9f5-467e-9b43-2b7952e5b479-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "f7ef9466-e9f5-467e-9b43-2b7952e5b479" (UID: "f7ef9466-e9f5-467e-9b43-2b7952e5b479"). InnerVolumeSpecName "ovsdb-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.795845 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-config" (OuterVolumeSpecName: "config") pod "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" (UID: "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.798083 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-scripts" (OuterVolumeSpecName: "scripts") pod "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" (UID: "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.803500 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7ef9466-e9f5-467e-9b43-2b7952e5b479-config" (OuterVolumeSpecName: "config") pod "f7ef9466-e9f5-467e-9b43-2b7952e5b479" (UID: "f7ef9466-e9f5-467e-9b43-2b7952e5b479"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.803546 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" (UID: "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.813209 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05290e3e-89c9-4073-96b6-e97a289f4431-kube-api-access-hlmc5" (OuterVolumeSpecName: "kube-api-access-hlmc5") pod "05290e3e-89c9-4073-96b6-e97a289f4431" (UID: "05290e3e-89c9-4073-96b6-e97a289f4431"). InnerVolumeSpecName "kube-api-access-hlmc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.828851 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-kube-api-access-gw46f" (OuterVolumeSpecName: "kube-api-access-gw46f") pod "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" (UID: "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59"). InnerVolumeSpecName "kube-api-access-gw46f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.828916 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" (UID: "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.828968 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "f7ef9466-e9f5-467e-9b43-2b7952e5b479" (UID: "f7ef9466-e9f5-467e-9b43-2b7952e5b479"). InnerVolumeSpecName "local-storage05-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.829152 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7ef9466-e9f5-467e-9b43-2b7952e5b479-kube-api-access-vc26w" (OuterVolumeSpecName: "kube-api-access-vc26w") pod "f7ef9466-e9f5-467e-9b43-2b7952e5b479" (UID: "f7ef9466-e9f5-467e-9b43-2b7952e5b479"). InnerVolumeSpecName "kube-api-access-vc26w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.846740 4783 generic.go:334] "Generic (PLEG): container finished" podID="1f741556-230b-409c-b9bd-d0dc1abbcd77" containerID="083c5d84dc5f5d23c7a9e1a3414dc17b7d8a4bbc02117dde2f658c66f9ee7b94" exitCode=143 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.846797 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d956c456d-krq7k" event={"ID":"1f741556-230b-409c-b9bd-d0dc1abbcd77","Type":"ContainerDied","Data":"083c5d84dc5f5d23c7a9e1a3414dc17b7d8a4bbc02117dde2f658c66f9ee7b94"} Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.853801 4783 generic.go:334] "Generic (PLEG): container finished" podID="990cfb5a-6508-4344-9df7-391f55a70bd8" containerID="23388cedb834ec06780dc2a580b02b9fda46fcb79fc7f70e7cc73f244c70f5ed" exitCode=143 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.853902 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"990cfb5a-6508-4344-9df7-391f55a70bd8","Type":"ContainerDied","Data":"23388cedb834ec06780dc2a580b02b9fda46fcb79fc7f70e7cc73f244c70f5ed"} Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.875928 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f7ef9466-e9f5-467e-9b43-2b7952e5b479/ovsdbserver-nb/0.log" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.875983 4783 generic.go:334] "Generic (PLEG): container finished" podID="f7ef9466-e9f5-467e-9b43-2b7952e5b479" containerID="723b0965694bde9a854686ff0198c23d4d50f4ee3872b0d79cd2c31ef8e522d8" exitCode=143 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.876037 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f7ef9466-e9f5-467e-9b43-2b7952e5b479","Type":"ContainerDied","Data":"723b0965694bde9a854686ff0198c23d4d50f4ee3872b0d79cd2c31ef8e522d8"} Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.876067 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f7ef9466-e9f5-467e-9b43-2b7952e5b479","Type":"ContainerDied","Data":"423c2d5f5b8ee644c4e5bf433d96b66344cb5f5a915e05866fa7794d185349bf"} Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.876084 4783 scope.go:117] "RemoveContainer" containerID="77f234927c319795b9d92d2d040555fe9d069b79314bf58ace80d9e625297b71" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.876243 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.890893 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-th6r6" event={"ID":"8bc852c2-c59b-4b84-bbfc-c8b62354c66d","Type":"ContainerDied","Data":"72db217779a4ed932fceb8bf2af004c40cab7a5267f018e136fe53a7f4b832f6"} Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.891423 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-th6r6" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.894629 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.894656 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.894666 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vc26w\" (UniqueName: \"kubernetes.io/projected/f7ef9466-e9f5-467e-9b43-2b7952e5b479-kube-api-access-vc26w\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.894676 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gw46f\" (UniqueName: \"kubernetes.io/projected/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-kube-api-access-gw46f\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.894684 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.894693 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f7ef9466-e9f5-467e-9b43-2b7952e5b479-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.894703 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlmc5\" (UniqueName: \"kubernetes.io/projected/05290e3e-89c9-4073-96b6-e97a289f4431-kube-api-access-hlmc5\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.894711 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.894724 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.894733 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7ef9466-e9f5-467e-9b43-2b7952e5b479-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.937521 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c707a7c5-54fa-4430-8bbe-ac8eebbb0a59/ovsdbserver-sb/0.log" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.937693 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.937741 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c707a7c5-54fa-4430-8bbe-ac8eebbb0a59","Type":"ContainerDied","Data":"4309f12cac37ee43c3a3e5a9252f2ea16a630cfb0d101c58b761a48bfbad750a"} Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.945386 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-cell1-novncproxy-0" podUID="d61c8e26-064d-430a-8bb8-4e3c5e192d3a" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"https://10.217.0.195:6080/vnc_lite.html\": dial tcp 10.217.0.195:6080: connect: connection refused" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.945611 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder230c-account-delete-xk8kk" event={"ID":"5026d481-7d2b-40cd-8369-17892ed22c77","Type":"ContainerStarted","Data":"590b4b117617d0040ea749961df33659ed2c8046092ab8e8b116c291c1b8ab0f"} Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.947409 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi1d76-account-delete-ncj8j"] Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.956670 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell12a43-account-delete-wc5dr" event={"ID":"634bd6a4-be67-43db-b032-7e083edce6eb","Type":"ContainerStarted","Data":"700bcc87ce16152c38beaca72f1f414c574b14adbd8530a0f861a2694a9fbcb9"} Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.974163 4783 generic.go:334] "Generic (PLEG): container finished" podID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" exitCode=0 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.974241 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ttc29" event={"ID":"61f71f56-b66e-46a2-a0c5-25d0477db0a2","Type":"ContainerDied","Data":"ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204"} Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.986450 4783 generic.go:334] "Generic (PLEG): container finished" podID="2419c631-f6ff-431e-bb3b-2c3285eda678" containerID="40a405fda44ba184b836f4f22105f610a1f2f4078bd7ae78c09b94c9367d95c2" exitCode=143 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.986549 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2419c631-f6ff-431e-bb3b-2c3285eda678","Type":"ContainerDied","Data":"40a405fda44ba184b836f4f22105f610a1f2f4078bd7ae78c09b94c9367d95c2"} Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.988903 4783 generic.go:334] "Generic (PLEG): container finished" podID="05290e3e-89c9-4073-96b6-e97a289f4431" containerID="8499ab2b955764e172baad551be4e5d6185f96063acbf96197574d2eb2b928d4" exitCode=137 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.988965 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.997755 4783 generic.go:334] "Generic (PLEG): container finished" podID="2a19af6c-8b2e-41f3-ac68-012bd49e514b" containerID="60da69babbe7c125d7aac96c30abeffc6a81804a11c64e08329bc23563951526" exitCode=143 Sep 30 13:58:31 crc kubenswrapper[4783]: I0930 13:58:31.997814 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-fb9cffd59-bwk45" event={"ID":"2a19af6c-8b2e-41f3-ac68-012bd49e514b","Type":"ContainerDied","Data":"60da69babbe7c125d7aac96c30abeffc6a81804a11c64e08329bc23563951526"} Sep 30 13:58:32 crc kubenswrapper[4783]: W0930 13:58:32.001062 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36fb1123_03da_4b8c_b9b1_39caa412db70.slice/crio-20782121e23e86ae932e1e9c1312106047b018c6876a9e8a9ac53b50dedbdf89 WatchSource:0}: Error finding container 20782121e23e86ae932e1e9c1312106047b018c6876a9e8a9ac53b50dedbdf89: Status 404 returned error can't find the container with id 20782121e23e86ae932e1e9c1312106047b018c6876a9e8a9ac53b50dedbdf89 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.005728 4783 generic.go:334] "Generic (PLEG): container finished" podID="6b97c668-20f4-48a9-a8ef-f5878e6aa23f" containerID="8d42af40fdf6ba3fbff5a708098b5ffb7371fc3b476c8c5bc057e0dc5815233d" exitCode=143 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.005918 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-784897656b-2kp66" event={"ID":"6b97c668-20f4-48a9-a8ef-f5878e6aa23f","Type":"ContainerDied","Data":"8d42af40fdf6ba3fbff5a708098b5ffb7371fc3b476c8c5bc057e0dc5815233d"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.081245 4783 generic.go:334] "Generic (PLEG): container finished" podID="3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" containerID="7fbe807e4a69a2c6466e2d4a52f57888f0a5143866c142a93efd093f14764a7a" exitCode=143 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.081336 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" event={"ID":"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d","Type":"ContainerDied","Data":"7fbe807e4a69a2c6466e2d4a52f57888f0a5143866c142a93efd093f14764a7a"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.091427 4783 generic.go:334] "Generic (PLEG): container finished" podID="ccf790ec-b4f7-4734-92a0-929ed51c08ec" containerID="bc07330040a034c8f60b202b084c69f15e451000d8ccb782c009041fba8c604d" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.091468 4783 generic.go:334] "Generic (PLEG): container finished" podID="ccf790ec-b4f7-4734-92a0-929ed51c08ec" containerID="4a6acb631ade5965dc80487c2617529e44bce90e5a8b1f824ba3aef899ae630c" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.091517 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-9d8545ff7-pqd2t" event={"ID":"ccf790ec-b4f7-4734-92a0-929ed51c08ec","Type":"ContainerDied","Data":"bc07330040a034c8f60b202b084c69f15e451000d8ccb782c009041fba8c604d"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.091550 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-9d8545ff7-pqd2t" event={"ID":"ccf790ec-b4f7-4734-92a0-929ed51c08ec","Type":"ContainerDied","Data":"4a6acb631ade5965dc80487c2617529e44bce90e5a8b1f824ba3aef899ae630c"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098773 4783 generic.go:334] "Generic 
(PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="0d70c68a7c4fc63e37a1cd88f352dcc6ea4b65b3ee61fb6b9e535bf1688edd50" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098798 4783 generic.go:334] "Generic (PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="a5d2dcabd6bb3cd5f6248c47300f6f1b1f5ab3e6ca65c46a1385315a9950b46a" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098809 4783 generic.go:334] "Generic (PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="e63439a8f0b25c832bdb5e04264df59bbf40a59ee781f02104bc7c90f0387d0f" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098819 4783 generic.go:334] "Generic (PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="65784350e77591c1ca799cd313cc75e676df485f81c5767c0ec61775c2feddef" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098826 4783 generic.go:334] "Generic (PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="f059bd6d5a4af4ef530539f9bc6ad12759d52d860d66ae9359e0c13a0faf1590" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098837 4783 generic.go:334] "Generic (PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="9d2fb4518ac235b269595c179e2eddcd2176f75944af31f8741a4f3a3772afd8" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098846 4783 generic.go:334] "Generic (PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="252f0904e64a3d0faf4018536bd7548f2c58c560fdd89b8a833d73b47bb1648c" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098853 4783 generic.go:334] "Generic (PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="84c1b4b25db238e614e22d807ca489645bb6aae387ddda2ee411cb9193dadcc3" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098861 4783 generic.go:334] "Generic (PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="b68136dd9fc59706f3378836d528857df1eec12a03161416d9087d37a2d7d285" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098870 4783 generic.go:334] "Generic (PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="5e62b2afddb1cd79f42408968b4363c8781a372ab3e53833b770416cab3087b7" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098877 4783 generic.go:334] "Generic (PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="bd7cfdf6e1722a5178a727be64336e94d492136884d482d78c9458aafb01c3e3" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098885 4783 generic.go:334] "Generic (PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="8aec62a44e6d0e2bf5d9c89e16252de35b71ae052ea1691ca721df50cb2cd898" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098892 4783 generic.go:334] "Generic (PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="e78afbdd94a9616ec2021e98bb1be4fbc47a48f38b3c103a4fcefb64434fd5b1" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098901 4783 generic.go:334] "Generic (PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="19f8581fd57609c6eca4ec015f369dde264c61b10ce59f14103fa1cc03844e73" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098950 
4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"0d70c68a7c4fc63e37a1cd88f352dcc6ea4b65b3ee61fb6b9e535bf1688edd50"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098978 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"a5d2dcabd6bb3cd5f6248c47300f6f1b1f5ab3e6ca65c46a1385315a9950b46a"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.098989 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"e63439a8f0b25c832bdb5e04264df59bbf40a59ee781f02104bc7c90f0387d0f"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.099000 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"65784350e77591c1ca799cd313cc75e676df485f81c5767c0ec61775c2feddef"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.099010 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"f059bd6d5a4af4ef530539f9bc6ad12759d52d860d66ae9359e0c13a0faf1590"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.099021 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"9d2fb4518ac235b269595c179e2eddcd2176f75944af31f8741a4f3a3772afd8"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.099031 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"252f0904e64a3d0faf4018536bd7548f2c58c560fdd89b8a833d73b47bb1648c"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.099040 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"84c1b4b25db238e614e22d807ca489645bb6aae387ddda2ee411cb9193dadcc3"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.099050 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"b68136dd9fc59706f3378836d528857df1eec12a03161416d9087d37a2d7d285"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.099062 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"5e62b2afddb1cd79f42408968b4363c8781a372ab3e53833b770416cab3087b7"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.099072 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"bd7cfdf6e1722a5178a727be64336e94d492136884d482d78c9458aafb01c3e3"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.099081 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"8aec62a44e6d0e2bf5d9c89e16252de35b71ae052ea1691ca721df50cb2cd898"} Sep 30 13:58:32 crc 
kubenswrapper[4783]: I0930 13:58:32.099091 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"e78afbdd94a9616ec2021e98bb1be4fbc47a48f38b3c103a4fcefb64434fd5b1"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.099100 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"19f8581fd57609c6eca4ec015f369dde264c61b10ce59f14103fa1cc03844e73"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.118986 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_3e0048e0-a916-434d-abd4-571cec7d4b6a/ovn-northd/0.log" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.119124 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"3e0048e0-a916-434d-abd4-571cec7d4b6a","Type":"ContainerDied","Data":"d59fffe18a7618daae0e434eeba0899a531be1e6a77a2a4f939e1058850e7f60"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.119875 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.121112 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "284aafcd-4081-400f-a1c3-9992b3557fc1" (UID: "284aafcd-4081-400f-a1c3-9992b3557fc1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.132303 4783 generic.go:334] "Generic (PLEG): container finished" podID="284aafcd-4081-400f-a1c3-9992b3557fc1" containerID="e102327404b7740d70116bbb62e2fff0bd91616eda2f2ce2560b87e112578b14" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.132371 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" event={"ID":"284aafcd-4081-400f-a1c3-9992b3557fc1","Type":"ContainerDied","Data":"e102327404b7740d70116bbb62e2fff0bd91616eda2f2ce2560b87e112578b14"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.132398 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" event={"ID":"284aafcd-4081-400f-a1c3-9992b3557fc1","Type":"ContainerDied","Data":"75f305b6d3b27d3d91d0441124a5fa6bb9515c64a7c940a27c2a4f75ef66bbb8"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.132471 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cc449b9dc-br2xm" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.136083 4783 generic.go:334] "Generic (PLEG): container finished" podID="050b08a6-64b8-4237-acfc-37711efa8361" containerID="9c00cf71bb8b3efbefb119c4700536e994f2ca1128db0c5280a9b57683983551" exitCode=143 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.136134 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"050b08a6-64b8-4237-acfc-37711efa8361","Type":"ContainerDied","Data":"9c00cf71bb8b3efbefb119c4700536e994f2ca1128db0c5280a9b57683983551"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.143559 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-9w2wl_801ddf87-455e-4941-8637-4c2f5da49d41/openstack-network-exporter/0.log" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.143628 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-9w2wl" event={"ID":"801ddf87-455e-4941-8637-4c2f5da49d41","Type":"ContainerDied","Data":"b20a30aecc57bec13d1844a1ed45f18452c8fbb06020939958b9c83740870e52"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.143722 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-9w2wl" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.158028 4783 generic.go:334] "Generic (PLEG): container finished" podID="de5783b8-dd5d-4570-ada8-5b1775a75813" containerID="95317c362e4cc6f8f466aada85a08023d39d7a1431f188a48a81981454a121fc" exitCode=143 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.158143 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"de5783b8-dd5d-4570-ada8-5b1775a75813","Type":"ContainerDied","Data":"95317c362e4cc6f8f466aada85a08023d39d7a1431f188a48a81981454a121fc"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.173171 4783 generic.go:334] "Generic (PLEG): container finished" podID="08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf" containerID="1d35034d0582de5860b8d159b9d4404746652a3756dedfa929760b8b9eda220d" exitCode=0 Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.173246 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronac9c-account-delete-9kkgx" event={"ID":"08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf","Type":"ContainerDied","Data":"1d35034d0582de5860b8d159b9d4404746652a3756dedfa929760b8b9eda220d"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.185376 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement92eb-account-delete-tcr2d" event={"ID":"957739b9-90a0-43bf-a5a4-9558993b660f","Type":"ContainerStarted","Data":"553f2d8cb3dd202687c3bac0e30c9501481dca33b03cd1880e16ffad90d3138d"} Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.199614 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.201070 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "284aafcd-4081-400f-a1c3-9992b3557fc1" (UID: "284aafcd-4081-400f-a1c3-9992b3557fc1"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.243472 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "284aafcd-4081-400f-a1c3-9992b3557fc1" (UID: "284aafcd-4081-400f-a1c3-9992b3557fc1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.313176 4783 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.314482 4783 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.314514 4783 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.314525 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: E0930 13:58:32.314580 4783 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Sep 30 13:58:32 crc kubenswrapper[4783]: E0930 13:58:32.314624 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data podName:b901a1db-0fb0-4d58-be99-fdfd812683e6 nodeName:}" failed. No retries permitted until 2025-09-30 13:58:36.314611008 +0000 UTC m=+1416.246077305 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data") pod "rabbitmq-cell1-server-0" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6") : configmap "rabbitmq-cell1-config-data" not found Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.323048 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7ef9466-e9f5-467e-9b43-2b7952e5b479" (UID: "f7ef9466-e9f5-467e-9b43-2b7952e5b479"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.327882 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "284aafcd-4081-400f-a1c3-9992b3557fc1" (UID: "284aafcd-4081-400f-a1c3-9992b3557fc1"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.336431 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-config" (OuterVolumeSpecName: "config") pod "284aafcd-4081-400f-a1c3-9992b3557fc1" (UID: "284aafcd-4081-400f-a1c3-9992b3557fc1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.359174 4783 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.359730 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" (UID: "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.389035 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05290e3e-89c9-4073-96b6-e97a289f4431-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "05290e3e-89c9-4073-96b6-e97a289f4431" (UID: "05290e3e-89c9-4073-96b6-e97a289f4431"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.398633 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05290e3e-89c9-4073-96b6-e97a289f4431-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05290e3e-89c9-4073-96b6-e97a289f4431" (UID: "05290e3e-89c9-4073-96b6-e97a289f4431"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.416349 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.416384 4783 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.416397 4783 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/05290e3e-89c9-4073-96b6-e97a289f4431-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.416424 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05290e3e-89c9-4073-96b6-e97a289f4431-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.416436 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/284aafcd-4081-400f-a1c3-9992b3557fc1-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.416527 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.416549 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.416629 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" (UID: "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.461360 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "f7ef9466-e9f5-467e-9b43-2b7952e5b479" (UID: "f7ef9466-e9f5-467e-9b43-2b7952e5b479"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.472673 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "f7ef9466-e9f5-467e-9b43-2b7952e5b479" (UID: "f7ef9466-e9f5-467e-9b43-2b7952e5b479"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.477633 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" (UID: "c707a7c5-54fa-4430-8bbe-ac8eebbb0a59"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.526045 4783 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.526080 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7ef9466-e9f5-467e-9b43-2b7952e5b479-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.526090 4783 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.526099 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.532314 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05290e3e-89c9-4073-96b6-e97a289f4431-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "05290e3e-89c9-4073-96b6-e97a289f4431" (UID: "05290e3e-89c9-4073-96b6-e97a289f4431"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.608507 4783 scope.go:117] "RemoveContainer" containerID="723b0965694bde9a854686ff0198c23d4d50f4ee3872b0d79cd2c31ef8e522d8" Sep 30 13:58:32 crc kubenswrapper[4783]: E0930 13:58:32.628052 4783 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.628110 4783 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/05290e3e-89c9-4073-96b6-e97a289f4431-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:32 crc kubenswrapper[4783]: E0930 13:58:32.628140 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data podName:164c5743-32f5-4347-9c9d-20d28f1f2dce nodeName:}" failed. No retries permitted until 2025-09-30 13:58:36.628121143 +0000 UTC m=+1416.559587450 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data") pod "rabbitmq-server-0" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce") : configmap "rabbitmq-config-data" not found
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.644656 4783 scope.go:117] "RemoveContainer" containerID="77f234927c319795b9d92d2d040555fe9d069b79314bf58ace80d9e625297b71"
Sep 30 13:58:32 crc kubenswrapper[4783]: E0930 13:58:32.647631 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77f234927c319795b9d92d2d040555fe9d069b79314bf58ace80d9e625297b71\": container with ID starting with 77f234927c319795b9d92d2d040555fe9d069b79314bf58ace80d9e625297b71 not found: ID does not exist" containerID="77f234927c319795b9d92d2d040555fe9d069b79314bf58ace80d9e625297b71"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.647704 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77f234927c319795b9d92d2d040555fe9d069b79314bf58ace80d9e625297b71"} err="failed to get container status \"77f234927c319795b9d92d2d040555fe9d069b79314bf58ace80d9e625297b71\": rpc error: code = NotFound desc = could not find container \"77f234927c319795b9d92d2d040555fe9d069b79314bf58ace80d9e625297b71\": container with ID starting with 77f234927c319795b9d92d2d040555fe9d069b79314bf58ace80d9e625297b71 not found: ID does not exist"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.647757 4783 scope.go:117] "RemoveContainer" containerID="723b0965694bde9a854686ff0198c23d4d50f4ee3872b0d79cd2c31ef8e522d8"
Sep 30 13:58:32 crc kubenswrapper[4783]: E0930 13:58:32.648117 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"723b0965694bde9a854686ff0198c23d4d50f4ee3872b0d79cd2c31ef8e522d8\": container with ID starting with 723b0965694bde9a854686ff0198c23d4d50f4ee3872b0d79cd2c31ef8e522d8 not found: ID does not exist" containerID="723b0965694bde9a854686ff0198c23d4d50f4ee3872b0d79cd2c31ef8e522d8"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.648159 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"723b0965694bde9a854686ff0198c23d4d50f4ee3872b0d79cd2c31ef8e522d8"} err="failed to get container status \"723b0965694bde9a854686ff0198c23d4d50f4ee3872b0d79cd2c31ef8e522d8\": rpc error: code = NotFound desc = could not find container \"723b0965694bde9a854686ff0198c23d4d50f4ee3872b0d79cd2c31ef8e522d8\": container with ID starting with 723b0965694bde9a854686ff0198c23d4d50f4ee3872b0d79cd2c31ef8e522d8 not found: ID does not exist"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.648177 4783 scope.go:117] "RemoveContainer" containerID="78d4d2f406a94d848bdcacabbf7c6fb21ceabcfec8d6cbb5135e09f32718484b"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.746167 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-9d8545ff7-pqd2t"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.780968 4783 scope.go:117] "RemoveContainer" containerID="f963b067d2255b5af1022d6dd485fe3e8d7ab5715354c349f168effd5f9bdcd4"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.833613 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccf790ec-b4f7-4734-92a0-929ed51c08ec-log-httpd\") pod \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") "
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.833829 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ccf790ec-b4f7-4734-92a0-929ed51c08ec-etc-swift\") pod \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") "
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.833919 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-combined-ca-bundle\") pod \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") "
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.833990 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-internal-tls-certs\") pod \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") "
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.835059 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-config-data\") pod \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") "
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.835170 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-public-tls-certs\") pod \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") "
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.835210 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbxqz\" (UniqueName: \"kubernetes.io/projected/ccf790ec-b4f7-4734-92a0-929ed51c08ec-kube-api-access-dbxqz\") pod \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") "
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.835304 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccf790ec-b4f7-4734-92a0-929ed51c08ec-run-httpd\") pod \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") "
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.837863 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccf790ec-b4f7-4734-92a0-929ed51c08ec-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ccf790ec-b4f7-4734-92a0-929ed51c08ec" (UID: "ccf790ec-b4f7-4734-92a0-929ed51c08ec"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.838186 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccf790ec-b4f7-4734-92a0-929ed51c08ec-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ccf790ec-b4f7-4734-92a0-929ed51c08ec" (UID: "ccf790ec-b4f7-4734-92a0-929ed51c08ec"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.856727 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccf790ec-b4f7-4734-92a0-929ed51c08ec-kube-api-access-dbxqz" (OuterVolumeSpecName: "kube-api-access-dbxqz") pod "ccf790ec-b4f7-4734-92a0-929ed51c08ec" (UID: "ccf790ec-b4f7-4734-92a0-929ed51c08ec"). InnerVolumeSpecName "kube-api-access-dbxqz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.872726 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccf790ec-b4f7-4734-92a0-929ed51c08ec-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "ccf790ec-b4f7-4734-92a0-929ed51c08ec" (UID: "ccf790ec-b4f7-4734-92a0-929ed51c08ec"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.882772 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05290e3e-89c9-4073-96b6-e97a289f4431" path="/var/lib/kubelet/pods/05290e3e-89c9-4073-96b6-e97a289f4431/volumes"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.883452 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25530b76-7a63-45b8-b096-492a37a7237d" path="/var/lib/kubelet/pods/25530b76-7a63-45b8-b096-492a37a7237d/volumes"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.884163 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="359bd4f5-9b93-470e-ab89-d9e05636adf0" path="/var/lib/kubelet/pods/359bd4f5-9b93-470e-ab89-d9e05636adf0/volumes"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.885071 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea" path="/var/lib/kubelet/pods/3c4ac1cd-e58e-4a82-91f3-8d6f2a9b7cea/volumes"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.886496 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="523d7b8c-32a8-4235-b665-b657176a8001" path="/var/lib/kubelet/pods/523d7b8c-32a8-4235-b665-b657176a8001/volumes"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.887301 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d9f067a-87ea-4ecf-8142-1e28d8d98574" path="/var/lib/kubelet/pods/5d9f067a-87ea-4ecf-8142-1e28d8d98574/volumes"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.888183 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c228744-0cbc-44ae-9539-7cd32f195543" path="/var/lib/kubelet/pods/7c228744-0cbc-44ae-9539-7cd32f195543/volumes"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.889447 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8655922a-c182-49c1-aa15-6f47bd279990" path="/var/lib/kubelet/pods/8655922a-c182-49c1-aa15-6f47bd279990/volumes"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.890350 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abf9589b-c83a-44a3-943f-97739903c659" path="/var/lib/kubelet/pods/abf9589b-c83a-44a3-943f-97739903c659/volumes"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.890992 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c989ff95-97ef-4a67-af97-0359d59c5392" path="/var/lib/kubelet/pods/c989ff95-97ef-4a67-af97-0359d59c5392/volumes"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.891646 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebb849f0-877d-4dc6-a589-9ae099eaac5e" path="/var/lib/kubelet/pods/ebb849f0-877d-4dc6-a589-9ae099eaac5e/volumes"
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.893289 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec598217-7641-433b-938d-e2740a05a9e1" path="/var/lib/kubelet/pods/ec598217-7641-433b-938d-e2740a05a9e1/volumes"
Sep 30 13:58:32 crc kubenswrapper[4783]: E0930 13:58:32.920936 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-combined-ca-bundle podName:ccf790ec-b4f7-4734-92a0-929ed51c08ec nodeName:}" failed. No retries permitted until 2025-09-30 13:58:33.420905289 +0000 UTC m=+1413.352371596 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-combined-ca-bundle") pod "ccf790ec-b4f7-4734-92a0-929ed51c08ec" (UID: "ccf790ec-b4f7-4734-92a0-929ed51c08ec") : error deleting /var/lib/kubelet/pods/ccf790ec-b4f7-4734-92a0-929ed51c08ec/volume-subpaths: remove /var/lib/kubelet/pods/ccf790ec-b4f7-4734-92a0-929ed51c08ec/volume-subpaths: no such file or directory
Sep 30 13:58:32 crc kubenswrapper[4783]: E0930 13:58:32.920977 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-internal-tls-certs podName:ccf790ec-b4f7-4734-92a0-929ed51c08ec nodeName:}" failed. No retries permitted until 2025-09-30 13:58:33.420969491 +0000 UTC m=+1413.352435798 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-internal-tls-certs") pod "ccf790ec-b4f7-4734-92a0-929ed51c08ec" (UID: "ccf790ec-b4f7-4734-92a0-929ed51c08ec") : error deleting /var/lib/kubelet/pods/ccf790ec-b4f7-4734-92a0-929ed51c08ec/volume-subpaths: remove /var/lib/kubelet/pods/ccf790ec-b4f7-4734-92a0-929ed51c08ec/volume-subpaths: no such file or directory
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.923964 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-config-data" (OuterVolumeSpecName: "config-data") pod "ccf790ec-b4f7-4734-92a0-929ed51c08ec" (UID: "ccf790ec-b4f7-4734-92a0-929ed51c08ec"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.924856 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ccf790ec-b4f7-4734-92a0-929ed51c08ec" (UID: "ccf790ec-b4f7-4734-92a0-929ed51c08ec"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.939692 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.939724 4783 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-public-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.939743 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbxqz\" (UniqueName: \"kubernetes.io/projected/ccf790ec-b4f7-4734-92a0-929ed51c08ec-kube-api-access-dbxqz\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.939754 4783 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccf790ec-b4f7-4734-92a0-929ed51c08ec-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.939765 4783 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccf790ec-b4f7-4734-92a0-929ed51c08ec-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:32 crc kubenswrapper[4783]: I0930 13:58:32.939775 4783 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ccf790ec-b4f7-4734-92a0-929ed51c08ec-etc-swift\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.033834 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutronac9c-account-delete-9kkgx"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.040214 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-th6r6"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.062208 4783 scope.go:117] "RemoveContainer" containerID="fcc76b379c0fd4e6d39b37b60874fb17163db1dfdebc6baf9355f72a722f6cb8"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.065465 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-th6r6"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.119845 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.127611 4783 scope.go:117] "RemoveContainer" containerID="8499ab2b955764e172baad551be4e5d6185f96063acbf96197574d2eb2b928d4"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.152303 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxvcg\" (UniqueName: \"kubernetes.io/projected/08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf-kube-api-access-kxvcg\") pod \"08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf\" (UID: \"08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.153734 4783 scope.go:117] "RemoveContainer" containerID="8499ab2b955764e172baad551be4e5d6185f96063acbf96197574d2eb2b928d4"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.166598 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-9w2wl"]
Sep 30 13:58:33 crc kubenswrapper[4783]: E0930 13:58:33.167567 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8499ab2b955764e172baad551be4e5d6185f96063acbf96197574d2eb2b928d4\": container with ID starting with 8499ab2b955764e172baad551be4e5d6185f96063acbf96197574d2eb2b928d4 not found: ID does not exist" containerID="8499ab2b955764e172baad551be4e5d6185f96063acbf96197574d2eb2b928d4"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.167606 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8499ab2b955764e172baad551be4e5d6185f96063acbf96197574d2eb2b928d4"} err="failed to get container status \"8499ab2b955764e172baad551be4e5d6185f96063acbf96197574d2eb2b928d4\": rpc error: code = NotFound desc = could not find container \"8499ab2b955764e172baad551be4e5d6185f96063acbf96197574d2eb2b928d4\": container with ID starting with 8499ab2b955764e172baad551be4e5d6185f96063acbf96197574d2eb2b928d4 not found: ID does not exist"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.167649 4783 scope.go:117] "RemoveContainer" containerID="ad940715dd3642761ec895a6d3116d512bb6c980dd421be308131af63b9114b0"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.182727 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-9w2wl"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.192486 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cc449b9dc-br2xm"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.196805 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf-kube-api-access-kxvcg" (OuterVolumeSpecName: "kube-api-access-kxvcg") pod "08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf" (UID: "08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf"). InnerVolumeSpecName "kube-api-access-kxvcg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.208204 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cc449b9dc-br2xm"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.248301 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.249130 4783 generic.go:334] "Generic (PLEG): container finished" podID="b1dc1d2a-552d-4400-9d1b-12a3a051c432" containerID="d100cc0e53e2504d5d93fa913ed337f4d3bdd4130801738388f1ae3625b57276" exitCode=0
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.251346 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b1dc1d2a-552d-4400-9d1b-12a3a051c432","Type":"ContainerDied","Data":"d100cc0e53e2504d5d93fa913ed337f4d3bdd4130801738388f1ae3625b57276"}
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.254491 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi1d76-account-delete-ncj8j" event={"ID":"36fb1123-03da-4b8c-b9b1-39caa412db70","Type":"ContainerStarted","Data":"20782121e23e86ae932e1e9c1312106047b018c6876a9e8a9ac53b50dedbdf89"}
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.258174 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-combined-ca-bundle\") pod \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.258339 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-config-data\") pod \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.258458 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-vencrypt-tls-certs\") pod \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.258597 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-nova-novncproxy-tls-certs\") pod \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.258693 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjvss\" (UniqueName: \"kubernetes.io/projected/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-kube-api-access-qjvss\") pod \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\" (UID: \"d61c8e26-064d-430a-8bb8-4e3c5e192d3a\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.259699 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxvcg\" (UniqueName: \"kubernetes.io/projected/08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf-kube-api-access-kxvcg\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.274318 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.280288 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-9d8545ff7-pqd2t" event={"ID":"ccf790ec-b4f7-4734-92a0-929ed51c08ec","Type":"ContainerDied","Data":"b210b69fc0933c8dd94ec666e1cefeb578815c1f37e23b423f4a4668a7f7f6dc"}
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.280564 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-9d8545ff7-pqd2t"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.281970 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-kube-api-access-qjvss" (OuterVolumeSpecName: "kube-api-access-qjvss") pod "d61c8e26-064d-430a-8bb8-4e3c5e192d3a" (UID: "d61c8e26-064d-430a-8bb8-4e3c5e192d3a"). InnerVolumeSpecName "kube-api-access-qjvss". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.287697 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.289615 4783 scope.go:117] "RemoveContainer" containerID="35f523ca250ac79ac4541561752eaf87ea314b3758bd92036bb084eef35aa318"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.292053 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.310203 4783 generic.go:334] "Generic (PLEG): container finished" podID="d61c8e26-064d-430a-8bb8-4e3c5e192d3a" containerID="f58695968a9a08174e809fb09ab91645d04b3c0b2d605ec2f45eabb0375db90f" exitCode=0
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.310515 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.311680 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d61c8e26-064d-430a-8bb8-4e3c5e192d3a","Type":"ContainerDied","Data":"f58695968a9a08174e809fb09ab91645d04b3c0b2d605ec2f45eabb0375db90f"}
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.311727 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d61c8e26-064d-430a-8bb8-4e3c5e192d3a","Type":"ContainerDied","Data":"ee552533f4539c436189d186f03bfe95d3ee6b29574a4538a5ff66ebdbee79c1"}
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.320364 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d61c8e26-064d-430a-8bb8-4e3c5e192d3a" (UID: "d61c8e26-064d-430a-8bb8-4e3c5e192d3a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.322846 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.332901 4783 generic.go:334] "Generic (PLEG): container finished" podID="5026d481-7d2b-40cd-8369-17892ed22c77" containerID="5583f7bb77f96137dbfbbe3a2cfb849fa76f09f327e0249ebad7f72e4e2cb2d5" exitCode=0
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.333157 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder230c-account-delete-xk8kk" event={"ID":"5026d481-7d2b-40cd-8369-17892ed22c77","Type":"ContainerDied","Data":"5583f7bb77f96137dbfbbe3a2cfb849fa76f09f327e0249ebad7f72e4e2cb2d5"}
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.339569 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-config-data" (OuterVolumeSpecName: "config-data") pod "d61c8e26-064d-430a-8bb8-4e3c5e192d3a" (UID: "d61c8e26-064d-430a-8bb8-4e3c5e192d3a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.348488 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.360590 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.362148 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-secrets\") pod \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.362292 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-config-data-default\") pod \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.362322 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-kolla-config\") pod \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.362404 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-galera-tls-certs\") pod \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.362477 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-operator-scripts\") pod \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.362541 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwj2k\" (UniqueName: \"kubernetes.io/projected/f1989fc2-d0ba-49ce-a488-589eaaaecb58-kube-api-access-xwj2k\") pod \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.362575 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f1989fc2-d0ba-49ce-a488-589eaaaecb58-config-data-generated\") pod \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.362602 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-combined-ca-bundle\") pod \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.362688 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\" (UID: \"f1989fc2-d0ba-49ce-a488-589eaaaecb58\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.363262 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjvss\" (UniqueName: \"kubernetes.io/projected/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-kube-api-access-qjvss\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.363284 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.363300 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.364406 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "f1989fc2-d0ba-49ce-a488-589eaaaecb58" (UID: "f1989fc2-d0ba-49ce-a488-589eaaaecb58"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.364591 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "f1989fc2-d0ba-49ce-a488-589eaaaecb58" (UID: "f1989fc2-d0ba-49ce-a488-589eaaaecb58"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.364930 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1989fc2-d0ba-49ce-a488-589eaaaecb58-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "f1989fc2-d0ba-49ce-a488-589eaaaecb58" (UID: "f1989fc2-d0ba-49ce-a488-589eaaaecb58"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.365151 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f1989fc2-d0ba-49ce-a488-589eaaaecb58" (UID: "f1989fc2-d0ba-49ce-a488-589eaaaecb58"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.366043 4783 generic.go:334] "Generic (PLEG): container finished" podID="f1989fc2-d0ba-49ce-a488-589eaaaecb58" containerID="8b44a3225e80529e92402310c15ae3201738ad99fd4f25914d73c93731e3eb14" exitCode=0
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.366102 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f1989fc2-d0ba-49ce-a488-589eaaaecb58","Type":"ContainerDied","Data":"8b44a3225e80529e92402310c15ae3201738ad99fd4f25914d73c93731e3eb14"}
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.366177 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.370654 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "d61c8e26-064d-430a-8bb8-4e3c5e192d3a" (UID: "d61c8e26-064d-430a-8bb8-4e3c5e192d3a"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.371290 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-secrets" (OuterVolumeSpecName: "secrets") pod "f1989fc2-d0ba-49ce-a488-589eaaaecb58" (UID: "f1989fc2-d0ba-49ce-a488-589eaaaecb58"). InnerVolumeSpecName "secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.375325 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "mysql-db") pod "f1989fc2-d0ba-49ce-a488-589eaaaecb58" (UID: "f1989fc2-d0ba-49ce-a488-589eaaaecb58"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.377007 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronac9c-account-delete-9kkgx" event={"ID":"08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf","Type":"ContainerDied","Data":"dfe3dbd2d94f43f1b092a66251df301a932e8457f189a362a4eb7b300e79e3ff"}
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.377133 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutronac9c-account-delete-9kkgx"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.378040 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1989fc2-d0ba-49ce-a488-589eaaaecb58-kube-api-access-xwj2k" (OuterVolumeSpecName: "kube-api-access-xwj2k") pod "f1989fc2-d0ba-49ce-a488-589eaaaecb58" (UID: "f1989fc2-d0ba-49ce-a488-589eaaaecb58"). InnerVolumeSpecName "kube-api-access-xwj2k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.390309 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "d61c8e26-064d-430a-8bb8-4e3c5e192d3a" (UID: "d61c8e26-064d-430a-8bb8-4e3c5e192d3a"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.409728 4783 scope.go:117] "RemoveContainer" containerID="e102327404b7740d70116bbb62e2fff0bd91616eda2f2ce2560b87e112578b14"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.436907 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f1989fc2-d0ba-49ce-a488-589eaaaecb58" (UID: "f1989fc2-d0ba-49ce-a488-589eaaaecb58"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.447677 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.448095 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="ceilometer-central-agent" containerID="cri-o://cecbe25798bb861b6def416ee9dd34eed22000629d5be8de71c4a766d16e7b32" gracePeriod=30
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.448633 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="proxy-httpd" containerID="cri-o://28abb173cd857cbb9d4a1c5a005edf6a644ce45b75ff79242a7e087b0bfe444f" gracePeriod=30
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.448695 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="sg-core" containerID="cri-o://2b3fa9c5a1e21601eec7e4e1e10d5c915c51e4786e66caa45491f931fcfc9654" gracePeriod=30
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.448740 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="ceilometer-notification-agent" containerID="cri-o://2a75fa3505609d0bdb1e679052bbfa1b815931211ba773373b4cf8cf6a6f8ced" gracePeriod=30
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.451527 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "f1989fc2-d0ba-49ce-a488-589eaaaecb58" (UID: "f1989fc2-d0ba-49ce-a488-589eaaaecb58"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.468323 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-combined-ca-bundle\") pod \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.468401 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-internal-tls-certs\") pod \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\" (UID: \"ccf790ec-b4f7-4734-92a0-929ed51c08ec\") "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.469146 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwj2k\" (UniqueName: \"kubernetes.io/projected/f1989fc2-d0ba-49ce-a488-589eaaaecb58-kube-api-access-xwj2k\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.469180 4783 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f1989fc2-d0ba-49ce-a488-589eaaaecb58-config-data-generated\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.469190 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.469208 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" "
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.469240 4783 reconciler_common.go:293] "Volume detached for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-secrets\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.469251 4783 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.469260 4783 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-config-data-default\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.469269 4783 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-kolla-config\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.469277 4783 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1989fc2-d0ba-49ce-a488-589eaaaecb58-galera-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.469286 4783 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/d61c8e26-064d-430a-8bb8-4e3c5e192d3a-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.469295 4783 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1989fc2-d0ba-49ce-a488-589eaaaecb58-operator-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.473434 4783 scope.go:117] "RemoveContainer" containerID="dd7d580824d1aa4e9f134f2e9bafeb6905e2e477dbcbfa9c2ccebb95f988046f"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.493465 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.493528 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ccf790ec-b4f7-4734-92a0-929ed51c08ec" (UID: "ccf790ec-b4f7-4734-92a0-929ed51c08ec"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.495447 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="f381dd59-999a-4cd2-8dc1-d0faea63df2c" containerName="kube-state-metrics" containerID="cri-o://1a2d3e8f3026d5fdf54bd68bc0e37a6babc472452cfbc73d9d47792a666e28be" gracePeriod=30
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.509976 4783 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.513891 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ccf790ec-b4f7-4734-92a0-929ed51c08ec" (UID: "ccf790ec-b4f7-4734-92a0-929ed51c08ec"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.547479 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronac9c-account-delete-9kkgx"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.549761 4783 scope.go:117] "RemoveContainer" containerID="e102327404b7740d70116bbb62e2fff0bd91616eda2f2ce2560b87e112578b14"
Sep 30 13:58:33 crc kubenswrapper[4783]: E0930 13:58:33.550067 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e102327404b7740d70116bbb62e2fff0bd91616eda2f2ce2560b87e112578b14\": container with ID starting with e102327404b7740d70116bbb62e2fff0bd91616eda2f2ce2560b87e112578b14 not found: ID does not exist" containerID="e102327404b7740d70116bbb62e2fff0bd91616eda2f2ce2560b87e112578b14"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.550093 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e102327404b7740d70116bbb62e2fff0bd91616eda2f2ce2560b87e112578b14"} err="failed to get container status \"e102327404b7740d70116bbb62e2fff0bd91616eda2f2ce2560b87e112578b14\": rpc error: code = NotFound desc = could not find container \"e102327404b7740d70116bbb62e2fff0bd91616eda2f2ce2560b87e112578b14\": container with ID starting with e102327404b7740d70116bbb62e2fff0bd91616eda2f2ce2560b87e112578b14 not found: ID does not exist"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.550112 4783 scope.go:117] "RemoveContainer" containerID="dd7d580824d1aa4e9f134f2e9bafeb6905e2e477dbcbfa9c2ccebb95f988046f"
Sep 30 13:58:33 crc kubenswrapper[4783]: E0930 13:58:33.550338 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd7d580824d1aa4e9f134f2e9bafeb6905e2e477dbcbfa9c2ccebb95f988046f\": container with ID starting with dd7d580824d1aa4e9f134f2e9bafeb6905e2e477dbcbfa9c2ccebb95f988046f not found: ID does not exist" containerID="dd7d580824d1aa4e9f134f2e9bafeb6905e2e477dbcbfa9c2ccebb95f988046f"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.550370 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd7d580824d1aa4e9f134f2e9bafeb6905e2e477dbcbfa9c2ccebb95f988046f"} err="failed to get container status \"dd7d580824d1aa4e9f134f2e9bafeb6905e2e477dbcbfa9c2ccebb95f988046f\": rpc error: code = NotFound desc = could not find container \"dd7d580824d1aa4e9f134f2e9bafeb6905e2e477dbcbfa9c2ccebb95f988046f\": container with ID starting with dd7d580824d1aa4e9f134f2e9bafeb6905e2e477dbcbfa9c2ccebb95f988046f not found: ID does not exist"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.550384 4783 scope.go:117] "RemoveContainer" containerID="25fabd726430aea0cdf79b31be6e26feb4aede29cf3f33ef9ba4f73371bf719a"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.566546 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutronac9c-account-delete-9kkgx"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.572490 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.572516 4783 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.572529 4783 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccf790ec-b4f7-4734-92a0-929ed51c08ec-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.653626 4783 scope.go:117] "RemoveContainer" containerID="bc07330040a034c8f60b202b084c69f15e451000d8ccb782c009041fba8c604d"
Sep 30 13:58:33 crc kubenswrapper[4783]: E0930 13:58:33.674671 4783 secret.go:188] Couldn't get secret openstack/placement-config-data: secret "placement-config-data" not found
Sep 30 13:58:33 crc kubenswrapper[4783]: E0930 13:58:33.674746 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-config-data podName:6b97c668-20f4-48a9-a8ef-f5878e6aa23f nodeName:}" failed. No retries permitted until 2025-09-30 13:58:37.674723934 +0000 UTC m=+1417.606190241 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-config-data") pod "placement-784897656b-2kp66" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f") : secret "placement-config-data" not found
Sep 30 13:58:33 crc kubenswrapper[4783]: E0930 13:58:33.674765 4783 secret.go:188] Couldn't get secret openstack/placement-scripts: secret "placement-scripts" not found
Sep 30 13:58:33 crc kubenswrapper[4783]: E0930 13:58:33.674838 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-scripts podName:6b97c668-20f4-48a9-a8ef-f5878e6aa23f nodeName:}" failed. No retries permitted until 2025-09-30 13:58:37.674817547 +0000 UTC m=+1417.606283854 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-scripts") pod "placement-784897656b-2kp66" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f") : secret "placement-scripts" not found
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.682602 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-fqsdm"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.690554 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-fqsdm"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.709081 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-84fcfd7bf5-qmzxl"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.709706 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-84fcfd7bf5-qmzxl" podUID="fdd4645f-8430-40ad-9539-663a01c74c13" containerName="keystone-api" containerID="cri-o://83c83549440f0e945a07ebe3f4406f83362c2602f75efd510a99b82f7f1c32fb" gracePeriod=30
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.727969 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-b48f7"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.737456 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-b48f7"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.749124 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-9d8545ff7-pqd2t"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.761304 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-9d8545ff7-pqd2t"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.768263 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.771233 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="8fece54c-da0b-4cc0-b20d-b442b2fa73ed" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.165:8776/healthcheck\": read tcp 10.217.0.2:40254->10.217.0.165:8776: read: connection reset by peer"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.773792 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.779417 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.786931 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-gg4cw"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.791302 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-gg4cw"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.796643 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-b208-account-create-xqdqt"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.802546 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-b208-account-create-xqdqt"]
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.835618 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="050b08a6-64b8-4237-acfc-37711efa8361" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.168:9292/healthcheck\": read tcp 10.217.0.2:59220->10.217.0.168:9292: read: connection reset by peer"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.835626 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="050b08a6-64b8-4237-acfc-37711efa8361" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.168:9292/healthcheck\": read tcp 10.217.0.2:59222->10.217.0.168:9292: read: connection reset by peer"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.910375 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="2419c631-f6ff-431e-bb3b-2c3285eda678" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.170:9292/healthcheck\": read tcp 10.217.0.2:56850->10.217.0.170:9292: read: connection reset by peer"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.910693 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="2419c631-f6ff-431e-bb3b-2c3285eda678" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.170:9292/healthcheck\": read tcp 10.217.0.2:56838->10.217.0.170:9292: read: connection reset by peer"
Sep 30 13:58:33 crc kubenswrapper[4783]: I0930 13:58:33.936776 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="2a09ae34-f770-404f-b7ec-1fd3b630bf4c" containerName="galera" containerID="cri-o://5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982" gracePeriod=30
Sep 30 13:58:34 crc kubenswrapper[4783]: E0930 13:58:34.081543 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 48d38b5c765562ca9d2dc56e100867eff5ee4e00fb09908ca53b0024ef270980 is running failed: container process not found" containerID="48d38b5c765562ca9d2dc56e100867eff5ee4e00fb09908ca53b0024ef270980" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Sep 30 13:58:34 crc kubenswrapper[4783]: E0930 13:58:34.081937 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 48d38b5c765562ca9d2dc56e100867eff5ee4e00fb09908ca53b0024ef270980 is running failed: container process not found" containerID="48d38b5c765562ca9d2dc56e100867eff5ee4e00fb09908ca53b0024ef270980" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Sep 30 13:58:34 crc kubenswrapper[4783]: E0930 13:58:34.082192 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 48d38b5c765562ca9d2dc56e100867eff5ee4e00fb09908ca53b0024ef270980 is running failed: container process not found" containerID="48d38b5c765562ca9d2dc56e100867eff5ee4e00fb09908ca53b0024ef270980" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Sep 30 13:58:34 crc kubenswrapper[4783]: E0930 13:58:34.082243 4783 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 48d38b5c765562ca9d2dc56e100867eff5ee4e00fb09908ca53b0024ef270980 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="8f157470-67d0-452c-9959-a452400c02d7" containerName="nova-cell0-conductor-conductor"
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.229603 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-7c9bc45547-5grb6" podUID="aea997d7-7510-42b0-91f8-07592048868f" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.171:9696/\": dial tcp 10.217.0.171:9696: connect: connection refused"
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.269023 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6d956c456d-krq7k" podUID="1f741556-230b-409c-b9bd-d0dc1abbcd77" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.156:9311/healthcheck\": read tcp 10.217.0.2:36194->10.217.0.156:9311: read: connection reset by peer"
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.269389 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6d956c456d-krq7k" podUID="1f741556-230b-409c-b9bd-d0dc1abbcd77" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.156:9311/healthcheck\": read tcp 10.217.0.2:36182->10.217.0.156:9311: read: connection reset by peer"
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.404503 4783 generic.go:334] "Generic (PLEG): container finished" podID="1f741556-230b-409c-b9bd-d0dc1abbcd77" containerID="9df990f3d0bfc3752e7528f602f6eb747222541c98468b6d5a79a895a7dfc8ca" exitCode=0
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.404575 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d956c456d-krq7k" event={"ID":"1f741556-230b-409c-b9bd-d0dc1abbcd77","Type":"ContainerDied","Data":"9df990f3d0bfc3752e7528f602f6eb747222541c98468b6d5a79a895a7dfc8ca"}
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.409029 4783 generic.go:334] "Generic (PLEG): container finished" podID="6b97c668-20f4-48a9-a8ef-f5878e6aa23f" containerID="8df62f6d21d21c10de4af33338f5e6aaa9331745b50e08b8d0f63b05fcdf0a2f" exitCode=0
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.409123 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-784897656b-2kp66" event={"ID":"6b97c668-20f4-48a9-a8ef-f5878e6aa23f","Type":"ContainerDied","Data":"8df62f6d21d21c10de4af33338f5e6aaa9331745b50e08b8d0f63b05fcdf0a2f"}
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.416671 4783 generic.go:334] "Generic (PLEG): container finished" podID="36fb1123-03da-4b8c-b9b1-39caa412db70" containerID="53dedf835225a60e320e72cca0e9df9e7653e3c7f6942a6b8581cf0cb390b07b" exitCode=1
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.417502 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi1d76-account-delete-ncj8j" event={"ID":"36fb1123-03da-4b8c-b9b1-39caa412db70","Type":"ContainerDied","Data":"53dedf835225a60e320e72cca0e9df9e7653e3c7f6942a6b8581cf0cb390b07b"}
Sep 30 13:58:34 crc kubenswrapper[4783]: E0930 13:58:34.449873 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 13:58:34 crc kubenswrapper[4783]: E0930 13:58:34.454072 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.454609 4783 generic.go:334] "Generic (PLEG): container finished" podID="f381dd59-999a-4cd2-8dc1-d0faea63df2c" containerID="1a2d3e8f3026d5fdf54bd68bc0e37a6babc472452cfbc73d9d47792a666e28be" exitCode=2
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.454731 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f381dd59-999a-4cd2-8dc1-d0faea63df2c","Type":"ContainerDied","Data":"1a2d3e8f3026d5fdf54bd68bc0e37a6babc472452cfbc73d9d47792a666e28be"}
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.454774 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f381dd59-999a-4cd2-8dc1-d0faea63df2c","Type":"ContainerDied","Data":"c51e5dfa9f8eeb3a45ac6098eb9eb5a9817b6073ae4994d0e9dda1f3bda7900f"}
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.454787 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c51e5dfa9f8eeb3a45ac6098eb9eb5a9817b6073ae4994d0e9dda1f3bda7900f"
Sep 30 13:58:34 crc kubenswrapper[4783]: E0930 13:58:34.456405 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 13:58:34 crc kubenswrapper[4783]: E0930 13:58:34.456452 4783 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovsdb-server"
Sep 30 13:58:34 crc kubenswrapper[4783]: E0930 13:58:34.457183 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Sep 30 13:58:34 crc kubenswrapper[4783]: E0930 13:58:34.458714 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.458970 4783 generic.go:334] "Generic (PLEG): container finished" podID="8f157470-67d0-452c-9959-a452400c02d7" containerID="48d38b5c765562ca9d2dc56e100867eff5ee4e00fb09908ca53b0024ef270980" exitCode=0
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.459028 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"8f157470-67d0-452c-9959-a452400c02d7","Type":"ContainerDied","Data":"48d38b5c765562ca9d2dc56e100867eff5ee4e00fb09908ca53b0024ef270980"}
Sep 30 13:58:34 crc kubenswrapper[4783]: E0930 13:58:34.460785 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Sep 30 13:58:34 crc kubenswrapper[4783]: E0930 13:58:34.460808 4783 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovs-vswitchd"
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.462205 4783 generic.go:334] "Generic (PLEG): container finished" podID="8fece54c-da0b-4cc0-b20d-b442b2fa73ed" containerID="6deab5bf48649d7f6437dbb5f0e0ebd19ad06c0737cc1e0e97eeb7f38e12f735" exitCode=0
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.462376 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8fece54c-da0b-4cc0-b20d-b442b2fa73ed","Type":"ContainerDied","Data":"6deab5bf48649d7f6437dbb5f0e0ebd19ad06c0737cc1e0e97eeb7f38e12f735"}
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.465801 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f1989fc2-d0ba-49ce-a488-589eaaaecb58","Type":"ContainerDied","Data":"7ad050a85df332e98d323bde18a1d2de74abba3f9a6f236b61b32e08330992c9"}
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.468445 4783 generic.go:334] "Generic (PLEG): container finished" podID="957739b9-90a0-43bf-a5a4-9558993b660f" containerID="12298961917aa6225ca7beee2fab9ad3c7cd0e1f2d8b6f8e67b491469d5a5438" exitCode=1
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.468502 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement92eb-account-delete-tcr2d" event={"ID":"957739b9-90a0-43bf-a5a4-9558993b660f","Type":"ContainerDied","Data":"12298961917aa6225ca7beee2fab9ad3c7cd0e1f2d8b6f8e67b491469d5a5438"}
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.473473 4783 generic.go:334] "Generic (PLEG): container finished" podID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerID="28abb173cd857cbb9d4a1c5a005edf6a644ce45b75ff79242a7e087b0bfe444f" exitCode=0
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.473496 4783 generic.go:334] "Generic (PLEG): container finished" podID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerID="2b3fa9c5a1e21601eec7e4e1e10d5c915c51e4786e66caa45491f931fcfc9654" exitCode=2
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.473504 4783 generic.go:334] "Generic (PLEG): container finished" podID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerID="cecbe25798bb861b6def416ee9dd34eed22000629d5be8de71c4a766d16e7b32" exitCode=0
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.473549 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01c79a8c-fb3e-4675-8f73-8e7916e746cc","Type":"ContainerDied","Data":"28abb173cd857cbb9d4a1c5a005edf6a644ce45b75ff79242a7e087b0bfe444f"}
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.473573 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01c79a8c-fb3e-4675-8f73-8e7916e746cc","Type":"ContainerDied","Data":"2b3fa9c5a1e21601eec7e4e1e10d5c915c51e4786e66caa45491f931fcfc9654"}
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.473584 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01c79a8c-fb3e-4675-8f73-8e7916e746cc","Type":"ContainerDied","Data":"cecbe25798bb861b6def416ee9dd34eed22000629d5be8de71c4a766d16e7b32"}
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.478652 4783 generic.go:334] "Generic (PLEG): container finished" podID="2419c631-f6ff-431e-bb3b-2c3285eda678" containerID="069ecdff26e68e0d96f961dea1f277e66bd9d7eb17de82605d0a89f72c085c42" exitCode=0
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.478689 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2419c631-f6ff-431e-bb3b-2c3285eda678","Type":"ContainerDied","Data":"069ecdff26e68e0d96f961dea1f277e66bd9d7eb17de82605d0a89f72c085c42"}
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.483894 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder230c-account-delete-xk8kk" event={"ID":"5026d481-7d2b-40cd-8369-17892ed22c77","Type":"ContainerDied","Data":"590b4b117617d0040ea749961df33659ed2c8046092ab8e8b116c291c1b8ab0f"}
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.483942 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="590b4b117617d0040ea749961df33659ed2c8046092ab8e8b116c291c1b8ab0f"
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.493951 4783 generic.go:334] "Generic (PLEG): container finished" podID="050b08a6-64b8-4237-acfc-37711efa8361" containerID="1bccf10c6c93de0d51e10e64262519a909d10f198ba045f898de5f0df6447a1d" exitCode=0
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.494028 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"050b08a6-64b8-4237-acfc-37711efa8361","Type":"ContainerDied","Data":"1bccf10c6c93de0d51e10e64262519a909d10f198ba045f898de5f0df6447a1d"}
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.496949 4783 generic.go:334] "Generic (PLEG): container finished" podID="634bd6a4-be67-43db-b032-7e083edce6eb" containerID="303f4522ae3111c4d058bda58fbde3804b8b897cc092933fec7940bb17ede686" exitCode=1
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.496994 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell12a43-account-delete-wc5dr" event={"ID":"634bd6a4-be67-43db-b032-7e083edce6eb","Type":"ContainerDied","Data":"303f4522ae3111c4d058bda58fbde3804b8b897cc092933fec7940bb17ede686"}
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.526834 4783 scope.go:117] "RemoveContainer" containerID="4a6acb631ade5965dc80487c2617529e44bce90e5a8b1f824ba3aef899ae630c"
Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.527732 4783 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/cinder230c-account-delete-xk8kk" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.557437 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.558800 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="de5783b8-dd5d-4570-ada8-5b1775a75813" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": read tcp 10.217.0.2:45904->10.217.0.202:8775: read: connection reset by peer" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.559140 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="de5783b8-dd5d-4570-ada8-5b1775a75813" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": read tcp 10.217.0.2:45890->10.217.0.202:8775: read: connection reset by peer" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.567781 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.572517 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.584309 4783 scope.go:117] "RemoveContainer" containerID="f58695968a9a08174e809fb09ab91645d04b3c0b2d605ec2f45eabb0375db90f" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.595910 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-state-metrics-tls-config\") pod \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.595962 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-state-metrics-tls-certs\") pod \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.596016 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pp4s\" (UniqueName: \"kubernetes.io/projected/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-api-access-9pp4s\") pod \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.596040 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-combined-ca-bundle\") pod \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\" (UID: \"f381dd59-999a-4cd2-8dc1-d0faea63df2c\") " Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.596070 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2nvg\" (UniqueName: \"kubernetes.io/projected/5026d481-7d2b-40cd-8369-17892ed22c77-kube-api-access-m2nvg\") pod \"5026d481-7d2b-40cd-8369-17892ed22c77\" (UID: \"5026d481-7d2b-40cd-8369-17892ed22c77\") " Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.603566 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-api-access-9pp4s" (OuterVolumeSpecName: "kube-api-access-9pp4s") pod "f381dd59-999a-4cd2-8dc1-d0faea63df2c" (UID: "f381dd59-999a-4cd2-8dc1-d0faea63df2c"). InnerVolumeSpecName "kube-api-access-9pp4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.603635 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5026d481-7d2b-40cd-8369-17892ed22c77-kube-api-access-m2nvg" (OuterVolumeSpecName: "kube-api-access-m2nvg") pod "5026d481-7d2b-40cd-8369-17892ed22c77" (UID: "5026d481-7d2b-40cd-8369-17892ed22c77"). InnerVolumeSpecName "kube-api-access-m2nvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.659955 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f381dd59-999a-4cd2-8dc1-d0faea63df2c" (UID: "f381dd59-999a-4cd2-8dc1-d0faea63df2c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.667993 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "f381dd59-999a-4cd2-8dc1-d0faea63df2c" (UID: "f381dd59-999a-4cd2-8dc1-d0faea63df2c"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.668356 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "f381dd59-999a-4cd2-8dc1-d0faea63df2c" (UID: "f381dd59-999a-4cd2-8dc1-d0faea63df2c"). InnerVolumeSpecName "kube-state-metrics-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.686371 4783 scope.go:117] "RemoveContainer" containerID="f58695968a9a08174e809fb09ab91645d04b3c0b2d605ec2f45eabb0375db90f" Sep 30 13:58:34 crc kubenswrapper[4783]: E0930 13:58:34.690792 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f58695968a9a08174e809fb09ab91645d04b3c0b2d605ec2f45eabb0375db90f\": container with ID starting with f58695968a9a08174e809fb09ab91645d04b3c0b2d605ec2f45eabb0375db90f not found: ID does not exist" containerID="f58695968a9a08174e809fb09ab91645d04b3c0b2d605ec2f45eabb0375db90f" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.690826 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f58695968a9a08174e809fb09ab91645d04b3c0b2d605ec2f45eabb0375db90f"} err="failed to get container status \"f58695968a9a08174e809fb09ab91645d04b3c0b2d605ec2f45eabb0375db90f\": rpc error: code = NotFound desc = could not find container \"f58695968a9a08174e809fb09ab91645d04b3c0b2d605ec2f45eabb0375db90f\": container with ID starting with f58695968a9a08174e809fb09ab91645d04b3c0b2d605ec2f45eabb0375db90f not found: ID does not exist" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.690851 4783 scope.go:117] "RemoveContainer" containerID="8b44a3225e80529e92402310c15ae3201738ad99fd4f25914d73c93731e3eb14" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.704548 4783 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.704583 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pp4s\" (UniqueName: \"kubernetes.io/projected/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-api-access-9pp4s\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.704595 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.704605 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2nvg\" (UniqueName: \"kubernetes.io/projected/5026d481-7d2b-40cd-8369-17892ed22c77-kube-api-access-m2nvg\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.704615 4783 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f381dd59-999a-4cd2-8dc1-d0faea63df2c-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.744486 4783 scope.go:117] "RemoveContainer" containerID="847d9d5dbc70a3f2b0e5bd8fb8cbe4e12e26e439bcef251ecfb39677c1e73e8d" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.783286 4783 scope.go:117] "RemoveContainer" containerID="1d35034d0582de5860b8d159b9d4404746652a3756dedfa929760b8b9eda220d" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.857810 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf" path="/var/lib/kubelet/pods/08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf/volumes" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 
13:58:34.858859 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="262bb3ed-93d0-4389-a89f-c2b2fe5623e0" path="/var/lib/kubelet/pods/262bb3ed-93d0-4389-a89f-c2b2fe5623e0/volumes" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.859653 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="284aafcd-4081-400f-a1c3-9992b3557fc1" path="/var/lib/kubelet/pods/284aafcd-4081-400f-a1c3-9992b3557fc1/volumes" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.861116 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1" path="/var/lib/kubelet/pods/2ac4ffb5-f36c-4019-92ec-5aac5a0a2cc1/volumes" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.863846 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e0048e0-a916-434d-abd4-571cec7d4b6a" path="/var/lib/kubelet/pods/3e0048e0-a916-434d-abd4-571cec7d4b6a/volumes" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.865753 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d" path="/var/lib/kubelet/pods/7eaa8e43-13e8-46b8-a4a9-1c8cb7a89b9d/volumes" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.867404 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="801ddf87-455e-4941-8637-4c2f5da49d41" path="/var/lib/kubelet/pods/801ddf87-455e-4941-8637-4c2f5da49d41/volumes" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.868652 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bc852c2-c59b-4b84-bbfc-c8b62354c66d" path="/var/lib/kubelet/pods/8bc852c2-c59b-4b84-bbfc-c8b62354c66d/volumes" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.870581 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b12cead9-2793-46b5-8654-420df6d90f01" path="/var/lib/kubelet/pods/b12cead9-2793-46b5-8654-420df6d90f01/volumes" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.871350 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" path="/var/lib/kubelet/pods/c707a7c5-54fa-4430-8bbe-ac8eebbb0a59/volumes" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.872935 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccf790ec-b4f7-4734-92a0-929ed51c08ec" path="/var/lib/kubelet/pods/ccf790ec-b4f7-4734-92a0-929ed51c08ec/volumes" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.873780 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d61c8e26-064d-430a-8bb8-4e3c5e192d3a" path="/var/lib/kubelet/pods/d61c8e26-064d-430a-8bb8-4e3c5e192d3a/volumes" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.874519 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1989fc2-d0ba-49ce-a488-589eaaaecb58" path="/var/lib/kubelet/pods/f1989fc2-d0ba-49ce-a488-589eaaaecb58/volumes" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.875148 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7ef9466-e9f5-467e-9b43-2b7952e5b479" path="/var/lib/kubelet/pods/f7ef9466-e9f5-467e-9b43-2b7952e5b479/volumes" Sep 30 13:58:34 crc kubenswrapper[4783]: I0930 13:58:34.918403 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="b901a1db-0fb0-4d58-be99-fdfd812683e6" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Sep 30 13:58:35 crc 
kubenswrapper[4783]: I0930 13:58:35.000484 4783 scope.go:117] "RemoveContainer" containerID="8b44a3225e80529e92402310c15ae3201738ad99fd4f25914d73c93731e3eb14" Sep 30 13:58:35 crc kubenswrapper[4783]: E0930 13:58:35.001106 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b44a3225e80529e92402310c15ae3201738ad99fd4f25914d73c93731e3eb14\": container with ID starting with 8b44a3225e80529e92402310c15ae3201738ad99fd4f25914d73c93731e3eb14 not found: ID does not exist" containerID="8b44a3225e80529e92402310c15ae3201738ad99fd4f25914d73c93731e3eb14" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.001156 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b44a3225e80529e92402310c15ae3201738ad99fd4f25914d73c93731e3eb14"} err="failed to get container status \"8b44a3225e80529e92402310c15ae3201738ad99fd4f25914d73c93731e3eb14\": rpc error: code = NotFound desc = could not find container \"8b44a3225e80529e92402310c15ae3201738ad99fd4f25914d73c93731e3eb14\": container with ID starting with 8b44a3225e80529e92402310c15ae3201738ad99fd4f25914d73c93731e3eb14 not found: ID does not exist" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.001187 4783 scope.go:117] "RemoveContainer" containerID="847d9d5dbc70a3f2b0e5bd8fb8cbe4e12e26e439bcef251ecfb39677c1e73e8d" Sep 30 13:58:35 crc kubenswrapper[4783]: E0930 13:58:35.001685 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"847d9d5dbc70a3f2b0e5bd8fb8cbe4e12e26e439bcef251ecfb39677c1e73e8d\": container with ID starting with 847d9d5dbc70a3f2b0e5bd8fb8cbe4e12e26e439bcef251ecfb39677c1e73e8d not found: ID does not exist" containerID="847d9d5dbc70a3f2b0e5bd8fb8cbe4e12e26e439bcef251ecfb39677c1e73e8d" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.001730 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"847d9d5dbc70a3f2b0e5bd8fb8cbe4e12e26e439bcef251ecfb39677c1e73e8d"} err="failed to get container status \"847d9d5dbc70a3f2b0e5bd8fb8cbe4e12e26e439bcef251ecfb39677c1e73e8d\": rpc error: code = NotFound desc = could not find container \"847d9d5dbc70a3f2b0e5bd8fb8cbe4e12e26e439bcef251ecfb39677c1e73e8d\": container with ID starting with 847d9d5dbc70a3f2b0e5bd8fb8cbe4e12e26e439bcef251ecfb39677c1e73e8d not found: ID does not exist" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.032010 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.034116 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.041487 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapi1d76-account-delete-ncj8j" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.110292 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-combined-ca-bundle\") pod \"1f741556-230b-409c-b9bd-d0dc1abbcd77\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.110383 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-internal-tls-certs\") pod \"1f741556-230b-409c-b9bd-d0dc1abbcd77\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.110407 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-config-data-custom\") pod \"1f741556-230b-409c-b9bd-d0dc1abbcd77\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.110445 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqt6r\" (UniqueName: \"kubernetes.io/projected/36fb1123-03da-4b8c-b9b1-39caa412db70-kube-api-access-xqt6r\") pod \"36fb1123-03da-4b8c-b9b1-39caa412db70\" (UID: \"36fb1123-03da-4b8c-b9b1-39caa412db70\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.110481 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f741556-230b-409c-b9bd-d0dc1abbcd77-logs\") pod \"1f741556-230b-409c-b9bd-d0dc1abbcd77\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.110529 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f157470-67d0-452c-9959-a452400c02d7-config-data\") pod \"8f157470-67d0-452c-9959-a452400c02d7\" (UID: \"8f157470-67d0-452c-9959-a452400c02d7\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.110558 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-config-data\") pod \"1f741556-230b-409c-b9bd-d0dc1abbcd77\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.110594 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qjjq\" (UniqueName: \"kubernetes.io/projected/8f157470-67d0-452c-9959-a452400c02d7-kube-api-access-4qjjq\") pod \"8f157470-67d0-452c-9959-a452400c02d7\" (UID: \"8f157470-67d0-452c-9959-a452400c02d7\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.110670 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mftgp\" (UniqueName: \"kubernetes.io/projected/1f741556-230b-409c-b9bd-d0dc1abbcd77-kube-api-access-mftgp\") pod \"1f741556-230b-409c-b9bd-d0dc1abbcd77\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.110727 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f157470-67d0-452c-9959-a452400c02d7-combined-ca-bundle\") pod 
\"8f157470-67d0-452c-9959-a452400c02d7\" (UID: \"8f157470-67d0-452c-9959-a452400c02d7\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.110805 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-public-tls-certs\") pod \"1f741556-230b-409c-b9bd-d0dc1abbcd77\" (UID: \"1f741556-230b-409c-b9bd-d0dc1abbcd77\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.113790 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36fb1123-03da-4b8c-b9b1-39caa412db70-kube-api-access-xqt6r" (OuterVolumeSpecName: "kube-api-access-xqt6r") pod "36fb1123-03da-4b8c-b9b1-39caa412db70" (UID: "36fb1123-03da-4b8c-b9b1-39caa412db70"). InnerVolumeSpecName "kube-api-access-xqt6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.116270 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f741556-230b-409c-b9bd-d0dc1abbcd77-logs" (OuterVolumeSpecName: "logs") pod "1f741556-230b-409c-b9bd-d0dc1abbcd77" (UID: "1f741556-230b-409c-b9bd-d0dc1abbcd77"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.117514 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1f741556-230b-409c-b9bd-d0dc1abbcd77" (UID: "1f741556-230b-409c-b9bd-d0dc1abbcd77"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.125064 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f741556-230b-409c-b9bd-d0dc1abbcd77-kube-api-access-mftgp" (OuterVolumeSpecName: "kube-api-access-mftgp") pod "1f741556-230b-409c-b9bd-d0dc1abbcd77" (UID: "1f741556-230b-409c-b9bd-d0dc1abbcd77"). InnerVolumeSpecName "kube-api-access-mftgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.125281 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f157470-67d0-452c-9959-a452400c02d7-kube-api-access-4qjjq" (OuterVolumeSpecName: "kube-api-access-4qjjq") pod "8f157470-67d0-452c-9959-a452400c02d7" (UID: "8f157470-67d0-452c-9959-a452400c02d7"). InnerVolumeSpecName "kube-api-access-4qjjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.172450 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f157470-67d0-452c-9959-a452400c02d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f157470-67d0-452c-9959-a452400c02d7" (UID: "8f157470-67d0-452c-9959-a452400c02d7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.178510 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f157470-67d0-452c-9959-a452400c02d7-config-data" (OuterVolumeSpecName: "config-data") pod "8f157470-67d0-452c-9959-a452400c02d7" (UID: "8f157470-67d0-452c-9959-a452400c02d7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.188299 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1f741556-230b-409c-b9bd-d0dc1abbcd77" (UID: "1f741556-230b-409c-b9bd-d0dc1abbcd77"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.202127 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1f741556-230b-409c-b9bd-d0dc1abbcd77" (UID: "1f741556-230b-409c-b9bd-d0dc1abbcd77"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.202970 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-config-data" (OuterVolumeSpecName: "config-data") pod "1f741556-230b-409c-b9bd-d0dc1abbcd77" (UID: "1f741556-230b-409c-b9bd-d0dc1abbcd77"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.214341 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mftgp\" (UniqueName: \"kubernetes.io/projected/1f741556-230b-409c-b9bd-d0dc1abbcd77-kube-api-access-mftgp\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.214371 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f157470-67d0-452c-9959-a452400c02d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.214383 4783 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.214395 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.214406 4783 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.214417 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqt6r\" (UniqueName: \"kubernetes.io/projected/36fb1123-03da-4b8c-b9b1-39caa412db70-kube-api-access-xqt6r\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.214430 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f741556-230b-409c-b9bd-d0dc1abbcd77-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.214441 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f157470-67d0-452c-9959-a452400c02d7-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc 
kubenswrapper[4783]: I0930 13:58:35.214452 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.214463 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qjjq\" (UniqueName: \"kubernetes.io/projected/8f157470-67d0-452c-9959-a452400c02d7-kube-api-access-4qjjq\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.229733 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1f741556-230b-409c-b9bd-d0dc1abbcd77" (UID: "1f741556-230b-409c-b9bd-d0dc1abbcd77"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.234200 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement92eb-account-delete-tcr2d" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.241725 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell12a43-account-delete-wc5dr" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.259462 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.274995 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.315857 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-combined-ca-bundle\") pod \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.315919 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-etc-machine-id\") pod \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.315985 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-scripts\") pod \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316021 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-scripts\") pod \"050b08a6-64b8-4237-acfc-37711efa8361\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316095 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwv9q\" (UniqueName: \"kubernetes.io/projected/634bd6a4-be67-43db-b032-7e083edce6eb-kube-api-access-zwv9q\") pod \"634bd6a4-be67-43db-b032-7e083edce6eb\" (UID: \"634bd6a4-be67-43db-b032-7e083edce6eb\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316111 4783 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-public-tls-certs\") pod \"050b08a6-64b8-4237-acfc-37711efa8361\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316129 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-logs\") pod \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316144 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-combined-ca-bundle\") pod \"050b08a6-64b8-4237-acfc-37711efa8361\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316170 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxrjn\" (UniqueName: \"kubernetes.io/projected/050b08a6-64b8-4237-acfc-37711efa8361-kube-api-access-xxrjn\") pod \"050b08a6-64b8-4237-acfc-37711efa8361\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316199 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfllx\" (UniqueName: \"kubernetes.io/projected/957739b9-90a0-43bf-a5a4-9558993b660f-kube-api-access-zfllx\") pod \"957739b9-90a0-43bf-a5a4-9558993b660f\" (UID: \"957739b9-90a0-43bf-a5a4-9558993b660f\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316254 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-config-data\") pod \"050b08a6-64b8-4237-acfc-37711efa8361\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316274 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffgxp\" (UniqueName: \"kubernetes.io/projected/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-kube-api-access-ffgxp\") pod \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316300 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-public-tls-certs\") pod \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316317 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-internal-tls-certs\") pod \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316338 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-config-data\") pod \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316353 
4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/050b08a6-64b8-4237-acfc-37711efa8361-logs\") pod \"050b08a6-64b8-4237-acfc-37711efa8361\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316372 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"050b08a6-64b8-4237-acfc-37711efa8361\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316390 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-config-data-custom\") pod \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\" (UID: \"8fece54c-da0b-4cc0-b20d-b442b2fa73ed\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316408 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/050b08a6-64b8-4237-acfc-37711efa8361-httpd-run\") pod \"050b08a6-64b8-4237-acfc-37711efa8361\" (UID: \"050b08a6-64b8-4237-acfc-37711efa8361\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316559 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8fece54c-da0b-4cc0-b20d-b442b2fa73ed" (UID: "8fece54c-da0b-4cc0-b20d-b442b2fa73ed"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316827 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/050b08a6-64b8-4237-acfc-37711efa8361-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "050b08a6-64b8-4237-acfc-37711efa8361" (UID: "050b08a6-64b8-4237-acfc-37711efa8361"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.316835 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-logs" (OuterVolumeSpecName: "logs") pod "8fece54c-da0b-4cc0-b20d-b442b2fa73ed" (UID: "8fece54c-da0b-4cc0-b20d-b442b2fa73ed"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.317105 4783 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.317132 4783 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f741556-230b-409c-b9bd-d0dc1abbcd77-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.317146 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.317159 4783 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/050b08a6-64b8-4237-acfc-37711efa8361-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.317292 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/050b08a6-64b8-4237-acfc-37711efa8361-logs" (OuterVolumeSpecName: "logs") pod "050b08a6-64b8-4237-acfc-37711efa8361" (UID: "050b08a6-64b8-4237-acfc-37711efa8361"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.322543 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-scripts" (OuterVolumeSpecName: "scripts") pod "050b08a6-64b8-4237-acfc-37711efa8361" (UID: "050b08a6-64b8-4237-acfc-37711efa8361"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.322642 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "050b08a6-64b8-4237-acfc-37711efa8361" (UID: "050b08a6-64b8-4237-acfc-37711efa8361"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.322904 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-scripts" (OuterVolumeSpecName: "scripts") pod "8fece54c-da0b-4cc0-b20d-b442b2fa73ed" (UID: "8fece54c-da0b-4cc0-b20d-b442b2fa73ed"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.322898 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/957739b9-90a0-43bf-a5a4-9558993b660f-kube-api-access-zfllx" (OuterVolumeSpecName: "kube-api-access-zfllx") pod "957739b9-90a0-43bf-a5a4-9558993b660f" (UID: "957739b9-90a0-43bf-a5a4-9558993b660f"). InnerVolumeSpecName "kube-api-access-zfllx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.324471 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/634bd6a4-be67-43db-b032-7e083edce6eb-kube-api-access-zwv9q" (OuterVolumeSpecName: "kube-api-access-zwv9q") pod "634bd6a4-be67-43db-b032-7e083edce6eb" (UID: "634bd6a4-be67-43db-b032-7e083edce6eb"). InnerVolumeSpecName "kube-api-access-zwv9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.326007 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-kube-api-access-ffgxp" (OuterVolumeSpecName: "kube-api-access-ffgxp") pod "8fece54c-da0b-4cc0-b20d-b442b2fa73ed" (UID: "8fece54c-da0b-4cc0-b20d-b442b2fa73ed"). InnerVolumeSpecName "kube-api-access-ffgxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.327339 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/050b08a6-64b8-4237-acfc-37711efa8361-kube-api-access-xxrjn" (OuterVolumeSpecName: "kube-api-access-xxrjn") pod "050b08a6-64b8-4237-acfc-37711efa8361" (UID: "050b08a6-64b8-4237-acfc-37711efa8361"). InnerVolumeSpecName "kube-api-access-xxrjn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.330479 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8fece54c-da0b-4cc0-b20d-b442b2fa73ed" (UID: "8fece54c-da0b-4cc0-b20d-b442b2fa73ed"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.367520 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8fece54c-da0b-4cc0-b20d-b442b2fa73ed" (UID: "8fece54c-da0b-4cc0-b20d-b442b2fa73ed"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.382281 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8fece54c-da0b-4cc0-b20d-b442b2fa73ed" (UID: "8fece54c-da0b-4cc0-b20d-b442b2fa73ed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.384575 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "050b08a6-64b8-4237-acfc-37711efa8361" (UID: "050b08a6-64b8-4237-acfc-37711efa8361"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.390884 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-config-data" (OuterVolumeSpecName: "config-data") pod "050b08a6-64b8-4237-acfc-37711efa8361" (UID: "050b08a6-64b8-4237-acfc-37711efa8361"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.393456 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "050b08a6-64b8-4237-acfc-37711efa8361" (UID: "050b08a6-64b8-4237-acfc-37711efa8361"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.406628 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8fece54c-da0b-4cc0-b20d-b442b2fa73ed" (UID: "8fece54c-da0b-4cc0-b20d-b442b2fa73ed"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.407404 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-config-data" (OuterVolumeSpecName: "config-data") pod "8fece54c-da0b-4cc0-b20d-b442b2fa73ed" (UID: "8fece54c-da0b-4cc0-b20d-b442b2fa73ed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418349 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffgxp\" (UniqueName: \"kubernetes.io/projected/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-kube-api-access-ffgxp\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418375 4783 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418384 4783 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418392 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418403 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/050b08a6-64b8-4237-acfc-37711efa8361-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418423 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418432 4783 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418439 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418447 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fece54c-da0b-4cc0-b20d-b442b2fa73ed-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418454 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418462 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwv9q\" (UniqueName: \"kubernetes.io/projected/634bd6a4-be67-43db-b032-7e083edce6eb-kube-api-access-zwv9q\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418470 4783 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418477 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418489 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxrjn\" (UniqueName: \"kubernetes.io/projected/050b08a6-64b8-4237-acfc-37711efa8361-kube-api-access-xxrjn\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418497 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfllx\" (UniqueName: \"kubernetes.io/projected/957739b9-90a0-43bf-a5a4-9558993b660f-kube-api-access-zfllx\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.418505 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/050b08a6-64b8-4237-acfc-37711efa8361-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.433911 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.440174 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.442040 4783 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.509823 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement92eb-account-delete-tcr2d" event={"ID":"957739b9-90a0-43bf-a5a4-9558993b660f","Type":"ContainerDied","Data":"553f2d8cb3dd202687c3bac0e30c9501481dca33b03cd1880e16ffad90d3138d"} Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.509868 4783 scope.go:117] "RemoveContainer" containerID="12298961917aa6225ca7beee2fab9ad3c7cd0e1f2d8b6f8e67b491469d5a5438" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.509881 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement92eb-account-delete-tcr2d" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.513645 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi1d76-account-delete-ncj8j" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.513660 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi1d76-account-delete-ncj8j" event={"ID":"36fb1123-03da-4b8c-b9b1-39caa412db70","Type":"ContainerDied","Data":"20782121e23e86ae932e1e9c1312106047b018c6876a9e8a9ac53b50dedbdf89"} Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.518299 4783 generic.go:334] "Generic (PLEG): container finished" podID="de5783b8-dd5d-4570-ada8-5b1775a75813" containerID="66c8744ce1f77267319fe7155ecbf63253ea5719037e333e1d1c36b06e0ed433" exitCode=0 Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.518355 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"de5783b8-dd5d-4570-ada8-5b1775a75813","Type":"ContainerDied","Data":"66c8744ce1f77267319fe7155ecbf63253ea5719037e333e1d1c36b06e0ed433"} Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.518378 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"de5783b8-dd5d-4570-ada8-5b1775a75813","Type":"ContainerDied","Data":"b4fa04f51c2a8427c20ea0732dd8d2f083b65b6a74df54fd584091c2a91a309a"} Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.518439 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.518876 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-scripts\") pod \"2419c631-f6ff-431e-bb3b-2c3285eda678\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.518899 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de5783b8-dd5d-4570-ada8-5b1775a75813-logs\") pod \"de5783b8-dd5d-4570-ada8-5b1775a75813\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.518936 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"2419c631-f6ff-431e-bb3b-2c3285eda678\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.518994 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7r9ml\" (UniqueName: \"kubernetes.io/projected/2419c631-f6ff-431e-bb3b-2c3285eda678-kube-api-access-7r9ml\") pod \"2419c631-f6ff-431e-bb3b-2c3285eda678\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.519019 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-config-data\") pod \"de5783b8-dd5d-4570-ada8-5b1775a75813\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.519085 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2419c631-f6ff-431e-bb3b-2c3285eda678-logs\") pod \"2419c631-f6ff-431e-bb3b-2c3285eda678\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.519105 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-nova-metadata-tls-certs\") pod \"de5783b8-dd5d-4570-ada8-5b1775a75813\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.519150 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2tcr\" (UniqueName: \"kubernetes.io/projected/de5783b8-dd5d-4570-ada8-5b1775a75813-kube-api-access-k2tcr\") pod \"de5783b8-dd5d-4570-ada8-5b1775a75813\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.519179 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-combined-ca-bundle\") pod \"2419c631-f6ff-431e-bb3b-2c3285eda678\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.519241 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2419c631-f6ff-431e-bb3b-2c3285eda678-httpd-run\") pod \"2419c631-f6ff-431e-bb3b-2c3285eda678\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " Sep 30 13:58:35 crc 
kubenswrapper[4783]: I0930 13:58:35.519282 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-combined-ca-bundle\") pod \"de5783b8-dd5d-4570-ada8-5b1775a75813\" (UID: \"de5783b8-dd5d-4570-ada8-5b1775a75813\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.519314 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-config-data\") pod \"2419c631-f6ff-431e-bb3b-2c3285eda678\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.519334 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-internal-tls-certs\") pod \"2419c631-f6ff-431e-bb3b-2c3285eda678\" (UID: \"2419c631-f6ff-431e-bb3b-2c3285eda678\") " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.519677 4783 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.520718 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2419c631-f6ff-431e-bb3b-2c3285eda678-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "2419c631-f6ff-431e-bb3b-2c3285eda678" (UID: "2419c631-f6ff-431e-bb3b-2c3285eda678"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.521073 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de5783b8-dd5d-4570-ada8-5b1775a75813-logs" (OuterVolumeSpecName: "logs") pod "de5783b8-dd5d-4570-ada8-5b1775a75813" (UID: "de5783b8-dd5d-4570-ada8-5b1775a75813"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.522069 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2419c631-f6ff-431e-bb3b-2c3285eda678-logs" (OuterVolumeSpecName: "logs") pod "2419c631-f6ff-431e-bb3b-2c3285eda678" (UID: "2419c631-f6ff-431e-bb3b-2c3285eda678"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.527650 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "2419c631-f6ff-431e-bb3b-2c3285eda678" (UID: "2419c631-f6ff-431e-bb3b-2c3285eda678"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.527756 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-scripts" (OuterVolumeSpecName: "scripts") pod "2419c631-f6ff-431e-bb3b-2c3285eda678" (UID: "2419c631-f6ff-431e-bb3b-2c3285eda678"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.533522 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de5783b8-dd5d-4570-ada8-5b1775a75813-kube-api-access-k2tcr" (OuterVolumeSpecName: "kube-api-access-k2tcr") pod "de5783b8-dd5d-4570-ada8-5b1775a75813" (UID: "de5783b8-dd5d-4570-ada8-5b1775a75813"). InnerVolumeSpecName "kube-api-access-k2tcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.533590 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6d956c456d-krq7k" event={"ID":"1f741556-230b-409c-b9bd-d0dc1abbcd77","Type":"ContainerDied","Data":"971b451a27e2cbdaac041ab9142479750b02d192361ae0b52c946286309ed3e5"} Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.533636 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2419c631-f6ff-431e-bb3b-2c3285eda678-kube-api-access-7r9ml" (OuterVolumeSpecName: "kube-api-access-7r9ml") pod "2419c631-f6ff-431e-bb3b-2c3285eda678" (UID: "2419c631-f6ff-431e-bb3b-2c3285eda678"). InnerVolumeSpecName "kube-api-access-7r9ml". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.533708 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6d956c456d-krq7k" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.536761 4783 scope.go:117] "RemoveContainer" containerID="53dedf835225a60e320e72cca0e9df9e7653e3c7f6942a6b8581cf0cb390b07b" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.537519 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"8f157470-67d0-452c-9959-a452400c02d7","Type":"ContainerDied","Data":"ef6c2c73e5a76bc989b713328ac39c4f0403911c01b172c51189e18f3e0e871c"} Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.537606 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.554441 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2419c631-f6ff-431e-bb3b-2c3285eda678","Type":"ContainerDied","Data":"9fd4a3f25a7ff5632270116d7ad9c058d001cd6c2d1a76df2b6e2b978af3aee1"} Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.554545 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.554894 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement92eb-account-delete-tcr2d"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.559145 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"050b08a6-64b8-4237-acfc-37711efa8361","Type":"ContainerDied","Data":"2096da2284588dfdaeed1715bebfcb28123ee57319d9c0bb9e0959ff0768ccd2"} Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.559234 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.564986 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2419c631-f6ff-431e-bb3b-2c3285eda678" (UID: "2419c631-f6ff-431e-bb3b-2c3285eda678"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.566421 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "de5783b8-dd5d-4570-ada8-5b1775a75813" (UID: "de5783b8-dd5d-4570-ada8-5b1775a75813"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.568880 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-config-data" (OuterVolumeSpecName: "config-data") pod "de5783b8-dd5d-4570-ada8-5b1775a75813" (UID: "de5783b8-dd5d-4570-ada8-5b1775a75813"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.569179 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.569274 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8fece54c-da0b-4cc0-b20d-b442b2fa73ed","Type":"ContainerDied","Data":"59f4fc2f93b7de531fbd31d16570ccaf625421f9295fb39404a719fd0d51160f"} Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.569883 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement92eb-account-delete-tcr2d"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.576283 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-config-data" (OuterVolumeSpecName: "config-data") pod "2419c631-f6ff-431e-bb3b-2c3285eda678" (UID: "2419c631-f6ff-431e-bb3b-2c3285eda678"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.577692 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder230c-account-delete-xk8kk" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.578351 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell12a43-account-delete-wc5dr" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.578419 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell12a43-account-delete-wc5dr" event={"ID":"634bd6a4-be67-43db-b032-7e083edce6eb","Type":"ContainerDied","Data":"700bcc87ce16152c38beaca72f1f414c574b14adbd8530a0f861a2694a9fbcb9"} Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.578604 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.585485 4783 scope.go:117] "RemoveContainer" containerID="66c8744ce1f77267319fe7155ecbf63253ea5719037e333e1d1c36b06e0ed433" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.599561 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "de5783b8-dd5d-4570-ada8-5b1775a75813" (UID: "de5783b8-dd5d-4570-ada8-5b1775a75813"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.610664 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.610713 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2419c631-f6ff-431e-bb3b-2c3285eda678" (UID: "2419c631-f6ff-431e-bb3b-2c3285eda678"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:35 crc kubenswrapper[4783]: E0930 13:58:35.618373 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="263f4dbca5cc00d91bb54ca88f80bb3b726ad47d96c37c7482687ea14abe3852" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.619458 4783 scope.go:117] "RemoveContainer" containerID="95317c362e4cc6f8f466aada85a08023d39d7a1431f188a48a81981454a121fc" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.620828 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.620850 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7r9ml\" (UniqueName: \"kubernetes.io/projected/2419c631-f6ff-431e-bb3b-2c3285eda678-kube-api-access-7r9ml\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.620863 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.620875 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2419c631-f6ff-431e-bb3b-2c3285eda678-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.620886 4783 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.620896 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2tcr\" (UniqueName: \"kubernetes.io/projected/de5783b8-dd5d-4570-ada8-5b1775a75813-kube-api-access-k2tcr\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.620907 
4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.620919 4783 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2419c631-f6ff-431e-bb3b-2c3285eda678-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.620932 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de5783b8-dd5d-4570-ada8-5b1775a75813-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.620941 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.620952 4783 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.620962 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2419c631-f6ff-431e-bb3b-2c3285eda678-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.620971 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de5783b8-dd5d-4570-ada8-5b1775a75813-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.623660 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 13:58:35 crc kubenswrapper[4783]: E0930 13:58:35.624017 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="263f4dbca5cc00d91bb54ca88f80bb3b726ad47d96c37c7482687ea14abe3852" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 30 13:58:35 crc kubenswrapper[4783]: E0930 13:58:35.628331 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="263f4dbca5cc00d91bb54ca88f80bb3b726ad47d96c37c7482687ea14abe3852" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 30 13:58:35 crc kubenswrapper[4783]: E0930 13:58:35.628384 4783 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="563b20bf-7587-442c-86c5-1cbb179a2bf6" containerName="nova-cell1-conductor-conductor" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.632434 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6d956c456d-krq7k"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.641339 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6d956c456d-krq7k"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.656892 4783 operation_generator.go:917] UnmountDevice 
succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.662727 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder230c-account-delete-xk8kk"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.676104 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder230c-account-delete-xk8kk"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.692344 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi1d76-account-delete-ncj8j"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.705382 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapi1d76-account-delete-ncj8j"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.708554 4783 scope.go:117] "RemoveContainer" containerID="66c8744ce1f77267319fe7155ecbf63253ea5719037e333e1d1c36b06e0ed433" Sep 30 13:58:35 crc kubenswrapper[4783]: E0930 13:58:35.709048 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66c8744ce1f77267319fe7155ecbf63253ea5719037e333e1d1c36b06e0ed433\": container with ID starting with 66c8744ce1f77267319fe7155ecbf63253ea5719037e333e1d1c36b06e0ed433 not found: ID does not exist" containerID="66c8744ce1f77267319fe7155ecbf63253ea5719037e333e1d1c36b06e0ed433" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.709087 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66c8744ce1f77267319fe7155ecbf63253ea5719037e333e1d1c36b06e0ed433"} err="failed to get container status \"66c8744ce1f77267319fe7155ecbf63253ea5719037e333e1d1c36b06e0ed433\": rpc error: code = NotFound desc = could not find container \"66c8744ce1f77267319fe7155ecbf63253ea5719037e333e1d1c36b06e0ed433\": container with ID starting with 66c8744ce1f77267319fe7155ecbf63253ea5719037e333e1d1c36b06e0ed433 not found: ID does not exist" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.709114 4783 scope.go:117] "RemoveContainer" containerID="95317c362e4cc6f8f466aada85a08023d39d7a1431f188a48a81981454a121fc" Sep 30 13:58:35 crc kubenswrapper[4783]: E0930 13:58:35.709522 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95317c362e4cc6f8f466aada85a08023d39d7a1431f188a48a81981454a121fc\": container with ID starting with 95317c362e4cc6f8f466aada85a08023d39d7a1431f188a48a81981454a121fc not found: ID does not exist" containerID="95317c362e4cc6f8f466aada85a08023d39d7a1431f188a48a81981454a121fc" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.709547 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95317c362e4cc6f8f466aada85a08023d39d7a1431f188a48a81981454a121fc"} err="failed to get container status \"95317c362e4cc6f8f466aada85a08023d39d7a1431f188a48a81981454a121fc\": rpc error: code = NotFound desc = could not find container \"95317c362e4cc6f8f466aada85a08023d39d7a1431f188a48a81981454a121fc\": container with ID starting with 95317c362e4cc6f8f466aada85a08023d39d7a1431f188a48a81981454a121fc not found: ID does not exist" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.709560 4783 scope.go:117] "RemoveContainer" containerID="9df990f3d0bfc3752e7528f602f6eb747222541c98468b6d5a79a895a7dfc8ca" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.711612 4783 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/cinder-api-0"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.719661 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.728329 4783 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.729612 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell12a43-account-delete-wc5dr"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.739175 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell12a43-account-delete-wc5dr"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.748091 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.759104 4783 scope.go:117] "RemoveContainer" containerID="083c5d84dc5f5d23c7a9e1a3414dc17b7d8a4bbc02117dde2f658c66f9ee7b94" Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.765873 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.773775 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.783576 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 13:58:35 crc kubenswrapper[4783]: I0930 13:58:35.803291 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="164c5743-32f5-4347-9c9d-20d28f1f2dce" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.000855 4783 scope.go:117] "RemoveContainer" containerID="48d38b5c765562ca9d2dc56e100867eff5ee4e00fb09908ca53b0024ef270980" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.005762 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-784897656b-2kp66" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.008491 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.020367 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.025468 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.029765 4783 scope.go:117] "RemoveContainer" containerID="069ecdff26e68e0d96f961dea1f277e66bd9d7eb17de82605d0a89f72c085c42" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.033909 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.034536 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-combined-ca-bundle\") pod \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.034619 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncnbq\" (UniqueName: \"kubernetes.io/projected/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-kube-api-access-ncnbq\") pod \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.034702 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-scripts\") pod \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.034802 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-logs\") pod \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.034848 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-config-data\") pod \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.034916 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-internal-tls-certs\") pod \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.035006 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-public-tls-certs\") pod \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\" (UID: \"6b97c668-20f4-48a9-a8ef-f5878e6aa23f\") " Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.058787 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-kube-api-access-ncnbq" (OuterVolumeSpecName: "kube-api-access-ncnbq") pod "6b97c668-20f4-48a9-a8ef-f5878e6aa23f" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f"). InnerVolumeSpecName "kube-api-access-ncnbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.066111 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-logs" (OuterVolumeSpecName: "logs") pod "6b97c668-20f4-48a9-a8ef-f5878e6aa23f" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.076148 4783 scope.go:117] "RemoveContainer" containerID="40a405fda44ba184b836f4f22105f610a1f2f4078bd7ae78c09b94c9367d95c2" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.084606 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-config-data" (OuterVolumeSpecName: "config-data") pod "6b97c668-20f4-48a9-a8ef-f5878e6aa23f" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.091123 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-scripts" (OuterVolumeSpecName: "scripts") pod "6b97c668-20f4-48a9-a8ef-f5878e6aa23f" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.117401 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b97c668-20f4-48a9-a8ef-f5878e6aa23f" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.137351 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.137381 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.137389 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.137399 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.137409 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncnbq\" (UniqueName: \"kubernetes.io/projected/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-kube-api-access-ncnbq\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.143393 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6b97c668-20f4-48a9-a8ef-f5878e6aa23f" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.161326 4783 scope.go:117] "RemoveContainer" containerID="1bccf10c6c93de0d51e10e64262519a909d10f198ba045f898de5f0df6447a1d" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.161958 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6b97c668-20f4-48a9-a8ef-f5878e6aa23f" (UID: "6b97c668-20f4-48a9-a8ef-f5878e6aa23f"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.200814 4783 scope.go:117] "RemoveContainer" containerID="9c00cf71bb8b3efbefb119c4700536e994f2ca1128db0c5280a9b57683983551" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.222092 4783 scope.go:117] "RemoveContainer" containerID="6deab5bf48649d7f6437dbb5f0e0ebd19ad06c0737cc1e0e97eeb7f38e12f735" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.238629 4783 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.238658 4783 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b97c668-20f4-48a9-a8ef-f5878e6aa23f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.253947 4783 scope.go:117] "RemoveContainer" containerID="117dc1758063264abe9e841b11bdaac117d677993965b0ae1f6a1d411ffb2d13" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.269892 4783 scope.go:117] "RemoveContainer" containerID="303f4522ae3111c4d058bda58fbde3804b8b897cc092933fec7940bb17ede686" Sep 30 13:58:36 crc kubenswrapper[4783]: E0930 13:58:36.339785 4783 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Sep 30 13:58:36 crc kubenswrapper[4783]: E0930 13:58:36.340149 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data podName:b901a1db-0fb0-4d58-be99-fdfd812683e6 nodeName:}" failed. No retries permitted until 2025-09-30 13:58:44.340128029 +0000 UTC m=+1424.271594336 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data") pod "rabbitmq-cell1-server-0" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6") : configmap "rabbitmq-cell1-config-data" not found Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.557848 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.613130 4783 generic.go:334] "Generic (PLEG): container finished" podID="990cfb5a-6508-4344-9df7-391f55a70bd8" containerID="a2bad418a29881b7122fefd4cd227ec191c29a702e6757d67cb0615fa138b8f9" exitCode=0 Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.613280 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.613630 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"990cfb5a-6508-4344-9df7-391f55a70bd8","Type":"ContainerDied","Data":"a2bad418a29881b7122fefd4cd227ec191c29a702e6757d67cb0615fa138b8f9"} Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.613704 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"990cfb5a-6508-4344-9df7-391f55a70bd8","Type":"ContainerDied","Data":"7ca31d61a9efdb8c98cca81ee99089ae9fa0855a0d0600b941c46d33ddf7b793"} Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.613726 4783 scope.go:117] "RemoveContainer" containerID="a2bad418a29881b7122fefd4cd227ec191c29a702e6757d67cb0615fa138b8f9" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.616676 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-784897656b-2kp66" event={"ID":"6b97c668-20f4-48a9-a8ef-f5878e6aa23f","Type":"ContainerDied","Data":"480a200cdc697c8ebf1abd5b620ac48b7758d3e56bb2816ab4f7b5cd9d38f624"} Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.616768 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-784897656b-2kp66" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.641465 4783 scope.go:117] "RemoveContainer" containerID="23388cedb834ec06780dc2a580b02b9fda46fcb79fc7f70e7cc73f244c70f5ed" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.645936 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-public-tls-certs\") pod \"990cfb5a-6508-4344-9df7-391f55a70bd8\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.645999 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-combined-ca-bundle\") pod \"990cfb5a-6508-4344-9df7-391f55a70bd8\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.646037 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkrvv\" (UniqueName: \"kubernetes.io/projected/990cfb5a-6508-4344-9df7-391f55a70bd8-kube-api-access-qkrvv\") pod \"990cfb5a-6508-4344-9df7-391f55a70bd8\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.646119 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-internal-tls-certs\") pod \"990cfb5a-6508-4344-9df7-391f55a70bd8\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.646193 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/990cfb5a-6508-4344-9df7-391f55a70bd8-logs\") pod \"990cfb5a-6508-4344-9df7-391f55a70bd8\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.646234 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-config-data\") pod 
\"990cfb5a-6508-4344-9df7-391f55a70bd8\" (UID: \"990cfb5a-6508-4344-9df7-391f55a70bd8\") " Sep 30 13:58:36 crc kubenswrapper[4783]: E0930 13:58:36.646690 4783 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 30 13:58:36 crc kubenswrapper[4783]: E0930 13:58:36.646752 4783 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data podName:164c5743-32f5-4347-9c9d-20d28f1f2dce nodeName:}" failed. No retries permitted until 2025-09-30 13:58:44.64673493 +0000 UTC m=+1424.578201237 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data") pod "rabbitmq-server-0" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce") : configmap "rabbitmq-config-data" not found Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.649716 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-784897656b-2kp66"] Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.650210 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/990cfb5a-6508-4344-9df7-391f55a70bd8-logs" (OuterVolumeSpecName: "logs") pod "990cfb5a-6508-4344-9df7-391f55a70bd8" (UID: "990cfb5a-6508-4344-9df7-391f55a70bd8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.653800 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-784897656b-2kp66"] Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.661999 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/990cfb5a-6508-4344-9df7-391f55a70bd8-kube-api-access-qkrvv" (OuterVolumeSpecName: "kube-api-access-qkrvv") pod "990cfb5a-6508-4344-9df7-391f55a70bd8" (UID: "990cfb5a-6508-4344-9df7-391f55a70bd8"). InnerVolumeSpecName "kube-api-access-qkrvv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.663992 4783 scope.go:117] "RemoveContainer" containerID="a2bad418a29881b7122fefd4cd227ec191c29a702e6757d67cb0615fa138b8f9" Sep 30 13:58:36 crc kubenswrapper[4783]: E0930 13:58:36.664467 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2bad418a29881b7122fefd4cd227ec191c29a702e6757d67cb0615fa138b8f9\": container with ID starting with a2bad418a29881b7122fefd4cd227ec191c29a702e6757d67cb0615fa138b8f9 not found: ID does not exist" containerID="a2bad418a29881b7122fefd4cd227ec191c29a702e6757d67cb0615fa138b8f9" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.664590 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2bad418a29881b7122fefd4cd227ec191c29a702e6757d67cb0615fa138b8f9"} err="failed to get container status \"a2bad418a29881b7122fefd4cd227ec191c29a702e6757d67cb0615fa138b8f9\": rpc error: code = NotFound desc = could not find container \"a2bad418a29881b7122fefd4cd227ec191c29a702e6757d67cb0615fa138b8f9\": container with ID starting with a2bad418a29881b7122fefd4cd227ec191c29a702e6757d67cb0615fa138b8f9 not found: ID does not exist" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.664721 4783 scope.go:117] "RemoveContainer" containerID="23388cedb834ec06780dc2a580b02b9fda46fcb79fc7f70e7cc73f244c70f5ed" Sep 30 13:58:36 crc kubenswrapper[4783]: E0930 13:58:36.665463 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23388cedb834ec06780dc2a580b02b9fda46fcb79fc7f70e7cc73f244c70f5ed\": container with ID starting with 23388cedb834ec06780dc2a580b02b9fda46fcb79fc7f70e7cc73f244c70f5ed not found: ID does not exist" containerID="23388cedb834ec06780dc2a580b02b9fda46fcb79fc7f70e7cc73f244c70f5ed" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.665507 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23388cedb834ec06780dc2a580b02b9fda46fcb79fc7f70e7cc73f244c70f5ed"} err="failed to get container status \"23388cedb834ec06780dc2a580b02b9fda46fcb79fc7f70e7cc73f244c70f5ed\": rpc error: code = NotFound desc = could not find container \"23388cedb834ec06780dc2a580b02b9fda46fcb79fc7f70e7cc73f244c70f5ed\": container with ID starting with 23388cedb834ec06780dc2a580b02b9fda46fcb79fc7f70e7cc73f244c70f5ed not found: ID does not exist" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.665537 4783 scope.go:117] "RemoveContainer" containerID="8df62f6d21d21c10de4af33338f5e6aaa9331745b50e08b8d0f63b05fcdf0a2f" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.678806 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-config-data" (OuterVolumeSpecName: "config-data") pod "990cfb5a-6508-4344-9df7-391f55a70bd8" (UID: "990cfb5a-6508-4344-9df7-391f55a70bd8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.679046 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "990cfb5a-6508-4344-9df7-391f55a70bd8" (UID: "990cfb5a-6508-4344-9df7-391f55a70bd8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.686484 4783 scope.go:117] "RemoveContainer" containerID="8d42af40fdf6ba3fbff5a708098b5ffb7371fc3b476c8c5bc057e0dc5815233d" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.702913 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "990cfb5a-6508-4344-9df7-391f55a70bd8" (UID: "990cfb5a-6508-4344-9df7-391f55a70bd8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.727514 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "990cfb5a-6508-4344-9df7-391f55a70bd8" (UID: "990cfb5a-6508-4344-9df7-391f55a70bd8"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.748469 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.748512 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkrvv\" (UniqueName: \"kubernetes.io/projected/990cfb5a-6508-4344-9df7-391f55a70bd8-kube-api-access-qkrvv\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.748527 4783 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.748538 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/990cfb5a-6508-4344-9df7-391f55a70bd8-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.748552 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.748563 4783 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/990cfb5a-6508-4344-9df7-391f55a70bd8-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.852829 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="050b08a6-64b8-4237-acfc-37711efa8361" path="/var/lib/kubelet/pods/050b08a6-64b8-4237-acfc-37711efa8361/volumes" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.853845 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f741556-230b-409c-b9bd-d0dc1abbcd77" path="/var/lib/kubelet/pods/1f741556-230b-409c-b9bd-d0dc1abbcd77/volumes" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.854876 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2419c631-f6ff-431e-bb3b-2c3285eda678" path="/var/lib/kubelet/pods/2419c631-f6ff-431e-bb3b-2c3285eda678/volumes" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.856345 4783 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36fb1123-03da-4b8c-b9b1-39caa412db70" path="/var/lib/kubelet/pods/36fb1123-03da-4b8c-b9b1-39caa412db70/volumes" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.856875 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5026d481-7d2b-40cd-8369-17892ed22c77" path="/var/lib/kubelet/pods/5026d481-7d2b-40cd-8369-17892ed22c77/volumes" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.857424 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="634bd6a4-be67-43db-b032-7e083edce6eb" path="/var/lib/kubelet/pods/634bd6a4-be67-43db-b032-7e083edce6eb/volumes" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.860056 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b97c668-20f4-48a9-a8ef-f5878e6aa23f" path="/var/lib/kubelet/pods/6b97c668-20f4-48a9-a8ef-f5878e6aa23f/volumes" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.861464 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f157470-67d0-452c-9959-a452400c02d7" path="/var/lib/kubelet/pods/8f157470-67d0-452c-9959-a452400c02d7/volumes" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.862768 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fece54c-da0b-4cc0-b20d-b442b2fa73ed" path="/var/lib/kubelet/pods/8fece54c-da0b-4cc0-b20d-b442b2fa73ed/volumes" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.864086 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="957739b9-90a0-43bf-a5a4-9558993b660f" path="/var/lib/kubelet/pods/957739b9-90a0-43bf-a5a4-9558993b660f/volumes" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.864742 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de5783b8-dd5d-4570-ada8-5b1775a75813" path="/var/lib/kubelet/pods/de5783b8-dd5d-4570-ada8-5b1775a75813/volumes" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.865417 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f381dd59-999a-4cd2-8dc1-d0faea63df2c" path="/var/lib/kubelet/pods/f381dd59-999a-4cd2-8dc1-d0faea63df2c/volumes" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.911030 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/keystone-84fcfd7bf5-qmzxl" podUID="fdd4645f-8430-40ad-9539-663a01c74c13" containerName="keystone-api" probeResult="failure" output="Get \"https://10.217.0.150:5000/v3\": read tcp 10.217.0.2:35016->10.217.0.150:5000: read: connection reset by peer" Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.933547 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:58:36 crc kubenswrapper[4783]: I0930 13:58:36.941669 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 13:58:37 crc kubenswrapper[4783]: E0930 13:58:37.485246 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Sep 30 13:58:37 crc kubenswrapper[4783]: E0930 13:58:37.486628 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Sep 30 13:58:37 crc kubenswrapper[4783]: E0930 13:58:37.487778 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Sep 30 13:58:37 crc kubenswrapper[4783]: E0930 13:58:37.487810 4783 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="2a09ae34-f770-404f-b7ec-1fd3b630bf4c" containerName="galera" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.556311 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.565900 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.599977 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.664448 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-confd\") pod \"164c5743-32f5-4347-9c9d-20d28f1f2dce\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.665251 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b901a1db-0fb0-4d58-be99-fdfd812683e6-erlang-cookie-secret\") pod \"b901a1db-0fb0-4d58-be99-fdfd812683e6\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.665419 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xj6g8\" (UniqueName: \"kubernetes.io/projected/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-kube-api-access-xj6g8\") pod \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.665522 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-erlang-cookie\") pod \"164c5743-32f5-4347-9c9d-20d28f1f2dce\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.665613 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-confd\") pod \"b901a1db-0fb0-4d58-be99-fdfd812683e6\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.665712 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-erlang-cookie\") pod \"b901a1db-0fb0-4d58-be99-fdfd812683e6\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.665795 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b901a1db-0fb0-4d58-be99-fdfd812683e6-pod-info\") pod \"b901a1db-0fb0-4d58-be99-fdfd812683e6\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.665927 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckr6l\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-kube-api-access-ckr6l\") pod \"164c5743-32f5-4347-9c9d-20d28f1f2dce\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.666016 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-config-data-custom\") pod \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.666117 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-logs\") pod \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.666265 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-server-conf\") pod \"164c5743-32f5-4347-9c9d-20d28f1f2dce\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.666375 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-config-data\") pod \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.666505 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-tls\") pod \"164c5743-32f5-4347-9c9d-20d28f1f2dce\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.666621 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-plugins-conf\") pod \"b901a1db-0fb0-4d58-be99-fdfd812683e6\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.666722 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-tls\") pod \"b901a1db-0fb0-4d58-be99-fdfd812683e6\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.666815 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cq4ll\" (UniqueName: 
\"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-kube-api-access-cq4ll\") pod \"b901a1db-0fb0-4d58-be99-fdfd812683e6\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.666914 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-server-conf\") pod \"b901a1db-0fb0-4d58-be99-fdfd812683e6\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.667017 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/164c5743-32f5-4347-9c9d-20d28f1f2dce-erlang-cookie-secret\") pod \"164c5743-32f5-4347-9c9d-20d28f1f2dce\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.667119 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/164c5743-32f5-4347-9c9d-20d28f1f2dce-pod-info\") pod \"164c5743-32f5-4347-9c9d-20d28f1f2dce\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.667214 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data\") pod \"b901a1db-0fb0-4d58-be99-fdfd812683e6\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.667324 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"164c5743-32f5-4347-9c9d-20d28f1f2dce\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.667414 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data\") pod \"164c5743-32f5-4347-9c9d-20d28f1f2dce\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.667519 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-plugins\") pod \"164c5743-32f5-4347-9c9d-20d28f1f2dce\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.667608 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-plugins\") pod \"b901a1db-0fb0-4d58-be99-fdfd812683e6\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.667689 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"b901a1db-0fb0-4d58-be99-fdfd812683e6\" (UID: \"b901a1db-0fb0-4d58-be99-fdfd812683e6\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.667743 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod 
"b901a1db-0fb0-4d58-be99-fdfd812683e6" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.667826 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-plugins-conf\") pod \"164c5743-32f5-4347-9c9d-20d28f1f2dce\" (UID: \"164c5743-32f5-4347-9c9d-20d28f1f2dce\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.667923 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-combined-ca-bundle\") pod \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\" (UID: \"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.668354 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-logs" (OuterVolumeSpecName: "logs") pod "3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" (UID: "3d12c07d-16a5-47c8-94af-fc04a4c0ba9d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.668499 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.668571 4783 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.668775 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "b901a1db-0fb0-4d58-be99-fdfd812683e6" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.674970 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "164c5743-32f5-4347-9c9d-20d28f1f2dce" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.675705 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "b901a1db-0fb0-4d58-be99-fdfd812683e6" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.676748 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.676811 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.676858 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.677495 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"edaa8451ea5ff38f645e9552be3529f3e61b692d69e710a73e7a302ef19b35cd"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.677550 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://edaa8451ea5ff38f645e9552be3529f3e61b692d69e710a73e7a302ef19b35cd" gracePeriod=600 Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.682263 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "b901a1db-0fb0-4d58-be99-fdfd812683e6" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.687595 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "164c5743-32f5-4347-9c9d-20d28f1f2dce" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.689383 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/b901a1db-0fb0-4d58-be99-fdfd812683e6-pod-info" (OuterVolumeSpecName: "pod-info") pod "b901a1db-0fb0-4d58-be99-fdfd812683e6" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.693011 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "164c5743-32f5-4347-9c9d-20d28f1f2dce" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.705111 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b901a1db-0fb0-4d58-be99-fdfd812683e6-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "b901a1db-0fb0-4d58-be99-fdfd812683e6" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.708942 4783 generic.go:334] "Generic (PLEG): container finished" podID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerID="2a75fa3505609d0bdb1e679052bbfa1b815931211ba773373b4cf8cf6a6f8ced" exitCode=0 Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.709038 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01c79a8c-fb3e-4675-8f73-8e7916e746cc","Type":"ContainerDied","Data":"2a75fa3505609d0bdb1e679052bbfa1b815931211ba773373b4cf8cf6a6f8ced"} Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.709339 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.709745 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "164c5743-32f5-4347-9c9d-20d28f1f2dce" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.722901 4783 generic.go:334] "Generic (PLEG): container finished" podID="3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" containerID="7cff1c12b1b0b2c4dcf219452ffc056adaceae66605a198cc4bdd76b90770222" exitCode=0 Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.722961 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" event={"ID":"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d","Type":"ContainerDied","Data":"7cff1c12b1b0b2c4dcf219452ffc056adaceae66605a198cc4bdd76b90770222"} Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.722986 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" event={"ID":"3d12c07d-16a5-47c8-94af-fc04a4c0ba9d","Type":"ContainerDied","Data":"4f83f9799c9d4c825935802dd1aca89dedb70560c22c76d5f67113a5905b98c8"} Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.723008 4783 scope.go:117] "RemoveContainer" containerID="7cff1c12b1b0b2c4dcf219452ffc056adaceae66605a198cc4bdd76b90770222" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.723102 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-95bcf9466-5g2ds" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.726378 4783 generic.go:334] "Generic (PLEG): container finished" podID="563b20bf-7587-442c-86c5-1cbb179a2bf6" containerID="263f4dbca5cc00d91bb54ca88f80bb3b726ad47d96c37c7482687ea14abe3852" exitCode=0 Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.726470 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"563b20bf-7587-442c-86c5-1cbb179a2bf6","Type":"ContainerDied","Data":"263f4dbca5cc00d91bb54ca88f80bb3b726ad47d96c37c7482687ea14abe3852"} Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.727393 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-kube-api-access-xj6g8" (OuterVolumeSpecName: "kube-api-access-xj6g8") pod "3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" (UID: "3d12c07d-16a5-47c8-94af-fc04a4c0ba9d"). InnerVolumeSpecName "kube-api-access-xj6g8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.727720 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "b901a1db-0fb0-4d58-be99-fdfd812683e6" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.734430 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/164c5743-32f5-4347-9c9d-20d28f1f2dce-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "164c5743-32f5-4347-9c9d-20d28f1f2dce" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.734512 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-kube-api-access-ckr6l" (OuterVolumeSpecName: "kube-api-access-ckr6l") pod "164c5743-32f5-4347-9c9d-20d28f1f2dce" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce"). InnerVolumeSpecName "kube-api-access-ckr6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.735509 4783 generic.go:334] "Generic (PLEG): container finished" podID="2a19af6c-8b2e-41f3-ac68-012bd49e514b" containerID="433d915c50b0d185b319c45fc33233e3e3c0ab13a0ec0a6aef298225900bcb06" exitCode=0 Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.735569 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-fb9cffd59-bwk45" event={"ID":"2a19af6c-8b2e-41f3-ac68-012bd49e514b","Type":"ContainerDied","Data":"433d915c50b0d185b319c45fc33233e3e3c0ab13a0ec0a6aef298225900bcb06"} Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.735602 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-fb9cffd59-bwk45" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.739720 4783 generic.go:334] "Generic (PLEG): container finished" podID="fdd4645f-8430-40ad-9539-663a01c74c13" containerID="83c83549440f0e945a07ebe3f4406f83362c2602f75efd510a99b82f7f1c32fb" exitCode=0 Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.739811 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-84fcfd7bf5-qmzxl" event={"ID":"fdd4645f-8430-40ad-9539-663a01c74c13","Type":"ContainerDied","Data":"83c83549440f0e945a07ebe3f4406f83362c2602f75efd510a99b82f7f1c32fb"} Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.744382 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" (UID: "3d12c07d-16a5-47c8-94af-fc04a4c0ba9d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.745323 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-kube-api-access-cq4ll" (OuterVolumeSpecName: "kube-api-access-cq4ll") pod "b901a1db-0fb0-4d58-be99-fdfd812683e6" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6"). InnerVolumeSpecName "kube-api-access-cq4ll". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.748443 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/164c5743-32f5-4347-9c9d-20d28f1f2dce-pod-info" (OuterVolumeSpecName: "pod-info") pod "164c5743-32f5-4347-9c9d-20d28f1f2dce" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.757827 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "164c5743-32f5-4347-9c9d-20d28f1f2dce" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.759312 4783 scope.go:117] "RemoveContainer" containerID="7fbe807e4a69a2c6466e2d4a52f57888f0a5143866c142a93efd093f14764a7a" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.762780 4783 generic.go:334] "Generic (PLEG): container finished" podID="b901a1db-0fb0-4d58-be99-fdfd812683e6" containerID="2c450bff4273d74a9d160832b6ebe969916f4c399367beb3955bf0e5e42fed9d" exitCode=0 Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.762881 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b901a1db-0fb0-4d58-be99-fdfd812683e6","Type":"ContainerDied","Data":"2c450bff4273d74a9d160832b6ebe969916f4c399367beb3955bf0e5e42fed9d"} Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.762933 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b901a1db-0fb0-4d58-be99-fdfd812683e6","Type":"ContainerDied","Data":"ef4a96baff801791ce5d6008155172f208e1d40f394f83e020f4fde86d71417c"} Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.763025 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.769195 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gd7v2\" (UniqueName: \"kubernetes.io/projected/2a19af6c-8b2e-41f3-ac68-012bd49e514b-kube-api-access-gd7v2\") pod \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.769326 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-config-data-custom\") pod \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.769398 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a19af6c-8b2e-41f3-ac68-012bd49e514b-logs\") pod \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.769559 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-combined-ca-bundle\") pod \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.769603 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-config-data\") pod \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\" (UID: \"2a19af6c-8b2e-41f3-ac68-012bd49e514b\") " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.770098 4783 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b901a1db-0fb0-4d58-be99-fdfd812683e6-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771043 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771057 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckr6l\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-kube-api-access-ckr6l\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771082 4783 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771094 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771103 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771113 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cq4ll\" (UniqueName: \"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-kube-api-access-cq4ll\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771125 4783 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/164c5743-32f5-4347-9c9d-20d28f1f2dce-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771135 4783 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/164c5743-32f5-4347-9c9d-20d28f1f2dce-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771164 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771176 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771187 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771206 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771231 4783 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771245 4783 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/b901a1db-0fb0-4d58-be99-fdfd812683e6-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771266 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xj6g8\" (UniqueName: \"kubernetes.io/projected/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-kube-api-access-xj6g8\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.771277 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.774306 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a19af6c-8b2e-41f3-ac68-012bd49e514b-logs" (OuterVolumeSpecName: "logs") pod "2a19af6c-8b2e-41f3-ac68-012bd49e514b" (UID: "2a19af6c-8b2e-41f3-ac68-012bd49e514b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.779883 4783 generic.go:334] "Generic (PLEG): container finished" podID="164c5743-32f5-4347-9c9d-20d28f1f2dce" containerID="fde77ef5d7a9cf8cd4dc2107f4da9a25122e2181ab7101e04370f5e2b4e9c998" exitCode=0 Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.779930 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"164c5743-32f5-4347-9c9d-20d28f1f2dce","Type":"ContainerDied","Data":"fde77ef5d7a9cf8cd4dc2107f4da9a25122e2181ab7101e04370f5e2b4e9c998"} Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.779957 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"164c5743-32f5-4347-9c9d-20d28f1f2dce","Type":"ContainerDied","Data":"db571d3067270dbf8cbe55cc5436507182e78f1ae3928b3b060ffdd16a212abf"} Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.780021 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.793268 4783 scope.go:117] "RemoveContainer" containerID="7cff1c12b1b0b2c4dcf219452ffc056adaceae66605a198cc4bdd76b90770222" Sep 30 13:58:37 crc kubenswrapper[4783]: E0930 13:58:37.793864 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cff1c12b1b0b2c4dcf219452ffc056adaceae66605a198cc4bdd76b90770222\": container with ID starting with 7cff1c12b1b0b2c4dcf219452ffc056adaceae66605a198cc4bdd76b90770222 not found: ID does not exist" containerID="7cff1c12b1b0b2c4dcf219452ffc056adaceae66605a198cc4bdd76b90770222" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.793895 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cff1c12b1b0b2c4dcf219452ffc056adaceae66605a198cc4bdd76b90770222"} err="failed to get container status \"7cff1c12b1b0b2c4dcf219452ffc056adaceae66605a198cc4bdd76b90770222\": rpc error: code = NotFound desc = could not find container \"7cff1c12b1b0b2c4dcf219452ffc056adaceae66605a198cc4bdd76b90770222\": container with ID starting with 7cff1c12b1b0b2c4dcf219452ffc056adaceae66605a198cc4bdd76b90770222 not found: ID does not exist" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.793914 4783 scope.go:117] "RemoveContainer" containerID="7fbe807e4a69a2c6466e2d4a52f57888f0a5143866c142a93efd093f14764a7a" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.794060 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a19af6c-8b2e-41f3-ac68-012bd49e514b-kube-api-access-gd7v2" (OuterVolumeSpecName: "kube-api-access-gd7v2") pod "2a19af6c-8b2e-41f3-ac68-012bd49e514b" (UID: "2a19af6c-8b2e-41f3-ac68-012bd49e514b"). InnerVolumeSpecName "kube-api-access-gd7v2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: E0930 13:58:37.794315 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fbe807e4a69a2c6466e2d4a52f57888f0a5143866c142a93efd093f14764a7a\": container with ID starting with 7fbe807e4a69a2c6466e2d4a52f57888f0a5143866c142a93efd093f14764a7a not found: ID does not exist" containerID="7fbe807e4a69a2c6466e2d4a52f57888f0a5143866c142a93efd093f14764a7a" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.794353 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fbe807e4a69a2c6466e2d4a52f57888f0a5143866c142a93efd093f14764a7a"} err="failed to get container status \"7fbe807e4a69a2c6466e2d4a52f57888f0a5143866c142a93efd093f14764a7a\": rpc error: code = NotFound desc = could not find container \"7fbe807e4a69a2c6466e2d4a52f57888f0a5143866c142a93efd093f14764a7a\": container with ID starting with 7fbe807e4a69a2c6466e2d4a52f57888f0a5143866c142a93efd093f14764a7a not found: ID does not exist" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.794372 4783 scope.go:117] "RemoveContainer" containerID="433d915c50b0d185b319c45fc33233e3e3c0ab13a0ec0a6aef298225900bcb06" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.797421 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2a19af6c-8b2e-41f3-ac68-012bd49e514b" (UID: "2a19af6c-8b2e-41f3-ac68-012bd49e514b"). 
InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.816152 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data" (OuterVolumeSpecName: "config-data") pod "b901a1db-0fb0-4d58-be99-fdfd812683e6" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.819850 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data" (OuterVolumeSpecName: "config-data") pod "164c5743-32f5-4347-9c9d-20d28f1f2dce" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.828363 4783 scope.go:117] "RemoveContainer" containerID="60da69babbe7c125d7aac96c30abeffc6a81804a11c64e08329bc23563951526" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.828766 4783 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.837614 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-config-data" (OuterVolumeSpecName: "config-data") pod "3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" (UID: "3d12c07d-16a5-47c8-94af-fc04a4c0ba9d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.854543 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" (UID: "3d12c07d-16a5-47c8-94af-fc04a4c0ba9d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.855618 4783 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.858001 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-server-conf" (OuterVolumeSpecName: "server-conf") pod "b901a1db-0fb0-4d58-be99-fdfd812683e6" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.897400 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.897429 4783 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a19af6c-8b2e-41f3-ac68-012bd49e514b-logs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.897482 4783 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.897496 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.897508 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.897519 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gd7v2\" (UniqueName: \"kubernetes.io/projected/2a19af6c-8b2e-41f3-ac68-012bd49e514b-kube-api-access-gd7v2\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.897534 4783 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.897545 4783 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.897555 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b901a1db-0fb0-4d58-be99-fdfd812683e6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.897566 4783 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.904389 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-config-data" (OuterVolumeSpecName: "config-data") pod "2a19af6c-8b2e-41f3-ac68-012bd49e514b" (UID: "2a19af6c-8b2e-41f3-ac68-012bd49e514b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.919717 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-server-conf" (OuterVolumeSpecName: "server-conf") pod "164c5743-32f5-4347-9c9d-20d28f1f2dce" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.930416 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2a19af6c-8b2e-41f3-ac68-012bd49e514b" (UID: "2a19af6c-8b2e-41f3-ac68-012bd49e514b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.972419 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "164c5743-32f5-4347-9c9d-20d28f1f2dce" (UID: "164c5743-32f5-4347-9c9d-20d28f1f2dce"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.974466 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "b901a1db-0fb0-4d58-be99-fdfd812683e6" (UID: "b901a1db-0fb0-4d58-be99-fdfd812683e6"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.999443 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/164c5743-32f5-4347-9c9d-20d28f1f2dce-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.999469 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b901a1db-0fb0-4d58-be99-fdfd812683e6-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.999479 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.999488 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a19af6c-8b2e-41f3-ac68-012bd49e514b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:37 crc kubenswrapper[4783]: I0930 13:58:37.999496 4783 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/164c5743-32f5-4347-9c9d-20d28f1f2dce-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.033612 4783 scope.go:117] "RemoveContainer" containerID="2c450bff4273d74a9d160832b6ebe969916f4c399367beb3955bf0e5e42fed9d" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.097349 4783 scope.go:117] "RemoveContainer" containerID="7599420c31f154f2821fb28904486c049a6d33032a582e1929d2de130e1a7325" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.100941 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.117173 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-fb9cffd59-bwk45"] Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.120071 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.127580 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-84fcfd7bf5-qmzxl" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.132620 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-fb9cffd59-bwk45"] Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.139081 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.145940 4783 scope.go:117] "RemoveContainer" containerID="2c450bff4273d74a9d160832b6ebe969916f4c399367beb3955bf0e5e42fed9d" Sep 30 13:58:38 crc kubenswrapper[4783]: E0930 13:58:38.151613 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c450bff4273d74a9d160832b6ebe969916f4c399367beb3955bf0e5e42fed9d\": container with ID starting with 2c450bff4273d74a9d160832b6ebe969916f4c399367beb3955bf0e5e42fed9d not found: ID does not exist" containerID="2c450bff4273d74a9d160832b6ebe969916f4c399367beb3955bf0e5e42fed9d" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.151660 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c450bff4273d74a9d160832b6ebe969916f4c399367beb3955bf0e5e42fed9d"} err="failed to get container status \"2c450bff4273d74a9d160832b6ebe969916f4c399367beb3955bf0e5e42fed9d\": rpc error: code = NotFound desc = could not find container \"2c450bff4273d74a9d160832b6ebe969916f4c399367beb3955bf0e5e42fed9d\": container with ID starting with 2c450bff4273d74a9d160832b6ebe969916f4c399367beb3955bf0e5e42fed9d not found: ID does not exist" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.151692 4783 scope.go:117] "RemoveContainer" containerID="7599420c31f154f2821fb28904486c049a6d33032a582e1929d2de130e1a7325" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.151617 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 13:58:38 crc kubenswrapper[4783]: E0930 13:58:38.153595 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7599420c31f154f2821fb28904486c049a6d33032a582e1929d2de130e1a7325\": container with ID starting with 7599420c31f154f2821fb28904486c049a6d33032a582e1929d2de130e1a7325 not found: ID does not exist" containerID="7599420c31f154f2821fb28904486c049a6d33032a582e1929d2de130e1a7325" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.153649 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7599420c31f154f2821fb28904486c049a6d33032a582e1929d2de130e1a7325"} err="failed to get container status \"7599420c31f154f2821fb28904486c049a6d33032a582e1929d2de130e1a7325\": rpc error: code = NotFound desc = could not find container \"7599420c31f154f2821fb28904486c049a6d33032a582e1929d2de130e1a7325\": container with ID starting with 7599420c31f154f2821fb28904486c049a6d33032a582e1929d2de130e1a7325 not found: ID does not exist" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.153674 4783 scope.go:117] "RemoveContainer" containerID="fde77ef5d7a9cf8cd4dc2107f4da9a25122e2181ab7101e04370f5e2b4e9c998" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.163881 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 
13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.170182 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.175488 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-95bcf9466-5g2ds"] Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.176290 4783 scope.go:117] "RemoveContainer" containerID="ff01f7f8cb2c149281e4623522e0bee054923bab6c79a1fd04884e785e56859b" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.179995 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-95bcf9466-5g2ds"] Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206371 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-config-data\") pod \"fdd4645f-8430-40ad-9539-663a01c74c13\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206424 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-scripts\") pod \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206552 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-internal-tls-certs\") pod \"fdd4645f-8430-40ad-9539-663a01c74c13\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206593 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frktz\" (UniqueName: \"kubernetes.io/projected/fdd4645f-8430-40ad-9539-663a01c74c13-kube-api-access-frktz\") pod \"fdd4645f-8430-40ad-9539-663a01c74c13\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206619 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvfw7\" (UniqueName: \"kubernetes.io/projected/563b20bf-7587-442c-86c5-1cbb179a2bf6-kube-api-access-dvfw7\") pod \"563b20bf-7587-442c-86c5-1cbb179a2bf6\" (UID: \"563b20bf-7587-442c-86c5-1cbb179a2bf6\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206646 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-combined-ca-bundle\") pod \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206682 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01c79a8c-fb3e-4675-8f73-8e7916e746cc-log-httpd\") pod \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206740 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-combined-ca-bundle\") pod \"fdd4645f-8430-40ad-9539-663a01c74c13\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " Sep 30 13:58:38 crc 
kubenswrapper[4783]: I0930 13:58:38.206769 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-fernet-keys\") pod \"fdd4645f-8430-40ad-9539-663a01c74c13\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206817 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-credential-keys\") pod \"fdd4645f-8430-40ad-9539-663a01c74c13\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206845 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-ceilometer-tls-certs\") pod \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206866 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01c79a8c-fb3e-4675-8f73-8e7916e746cc-run-httpd\") pod \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206894 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-sg-core-conf-yaml\") pod \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206916 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/563b20bf-7587-442c-86c5-1cbb179a2bf6-config-data\") pod \"563b20bf-7587-442c-86c5-1cbb179a2bf6\" (UID: \"563b20bf-7587-442c-86c5-1cbb179a2bf6\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206952 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/563b20bf-7587-442c-86c5-1cbb179a2bf6-combined-ca-bundle\") pod \"563b20bf-7587-442c-86c5-1cbb179a2bf6\" (UID: \"563b20bf-7587-442c-86c5-1cbb179a2bf6\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.206983 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-scripts\") pod \"fdd4645f-8430-40ad-9539-663a01c74c13\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.207019 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-public-tls-certs\") pod \"fdd4645f-8430-40ad-9539-663a01c74c13\" (UID: \"fdd4645f-8430-40ad-9539-663a01c74c13\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.207046 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkfzl\" (UniqueName: \"kubernetes.io/projected/01c79a8c-fb3e-4675-8f73-8e7916e746cc-kube-api-access-xkfzl\") pod \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 
13:58:38.207076 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-config-data\") pod \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\" (UID: \"01c79a8c-fb3e-4675-8f73-8e7916e746cc\") " Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.215607 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01c79a8c-fb3e-4675-8f73-8e7916e746cc-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "01c79a8c-fb3e-4675-8f73-8e7916e746cc" (UID: "01c79a8c-fb3e-4675-8f73-8e7916e746cc"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.218513 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-scripts" (OuterVolumeSpecName: "scripts") pod "01c79a8c-fb3e-4675-8f73-8e7916e746cc" (UID: "01c79a8c-fb3e-4675-8f73-8e7916e746cc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.221766 4783 scope.go:117] "RemoveContainer" containerID="fde77ef5d7a9cf8cd4dc2107f4da9a25122e2181ab7101e04370f5e2b4e9c998" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.224632 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdd4645f-8430-40ad-9539-663a01c74c13-kube-api-access-frktz" (OuterVolumeSpecName: "kube-api-access-frktz") pod "fdd4645f-8430-40ad-9539-663a01c74c13" (UID: "fdd4645f-8430-40ad-9539-663a01c74c13"). InnerVolumeSpecName "kube-api-access-frktz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.224666 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "fdd4645f-8430-40ad-9539-663a01c74c13" (UID: "fdd4645f-8430-40ad-9539-663a01c74c13"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.224696 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/563b20bf-7587-442c-86c5-1cbb179a2bf6-kube-api-access-dvfw7" (OuterVolumeSpecName: "kube-api-access-dvfw7") pod "563b20bf-7587-442c-86c5-1cbb179a2bf6" (UID: "563b20bf-7587-442c-86c5-1cbb179a2bf6"). InnerVolumeSpecName "kube-api-access-dvfw7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: E0930 13:58:38.224769 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fde77ef5d7a9cf8cd4dc2107f4da9a25122e2181ab7101e04370f5e2b4e9c998\": container with ID starting with fde77ef5d7a9cf8cd4dc2107f4da9a25122e2181ab7101e04370f5e2b4e9c998 not found: ID does not exist" containerID="fde77ef5d7a9cf8cd4dc2107f4da9a25122e2181ab7101e04370f5e2b4e9c998" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.224847 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fde77ef5d7a9cf8cd4dc2107f4da9a25122e2181ab7101e04370f5e2b4e9c998"} err="failed to get container status \"fde77ef5d7a9cf8cd4dc2107f4da9a25122e2181ab7101e04370f5e2b4e9c998\": rpc error: code = NotFound desc = could not find container \"fde77ef5d7a9cf8cd4dc2107f4da9a25122e2181ab7101e04370f5e2b4e9c998\": container with ID starting with fde77ef5d7a9cf8cd4dc2107f4da9a25122e2181ab7101e04370f5e2b4e9c998 not found: ID does not exist" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.224879 4783 scope.go:117] "RemoveContainer" containerID="ff01f7f8cb2c149281e4623522e0bee054923bab6c79a1fd04884e785e56859b" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.230957 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01c79a8c-fb3e-4675-8f73-8e7916e746cc-kube-api-access-xkfzl" (OuterVolumeSpecName: "kube-api-access-xkfzl") pod "01c79a8c-fb3e-4675-8f73-8e7916e746cc" (UID: "01c79a8c-fb3e-4675-8f73-8e7916e746cc"). InnerVolumeSpecName "kube-api-access-xkfzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.231066 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-scripts" (OuterVolumeSpecName: "scripts") pod "fdd4645f-8430-40ad-9539-663a01c74c13" (UID: "fdd4645f-8430-40ad-9539-663a01c74c13"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.231316 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01c79a8c-fb3e-4675-8f73-8e7916e746cc-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "01c79a8c-fb3e-4675-8f73-8e7916e746cc" (UID: "01c79a8c-fb3e-4675-8f73-8e7916e746cc"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: E0930 13:58:38.237972 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff01f7f8cb2c149281e4623522e0bee054923bab6c79a1fd04884e785e56859b\": container with ID starting with ff01f7f8cb2c149281e4623522e0bee054923bab6c79a1fd04884e785e56859b not found: ID does not exist" containerID="ff01f7f8cb2c149281e4623522e0bee054923bab6c79a1fd04884e785e56859b" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.238086 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff01f7f8cb2c149281e4623522e0bee054923bab6c79a1fd04884e785e56859b"} err="failed to get container status \"ff01f7f8cb2c149281e4623522e0bee054923bab6c79a1fd04884e785e56859b\": rpc error: code = NotFound desc = could not find container \"ff01f7f8cb2c149281e4623522e0bee054923bab6c79a1fd04884e785e56859b\": container with ID starting with ff01f7f8cb2c149281e4623522e0bee054923bab6c79a1fd04884e785e56859b not found: ID does not exist" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.246161 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "fdd4645f-8430-40ad-9539-663a01c74c13" (UID: "fdd4645f-8430-40ad-9539-663a01c74c13"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.265923 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fdd4645f-8430-40ad-9539-663a01c74c13" (UID: "fdd4645f-8430-40ad-9539-663a01c74c13"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.266452 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "01c79a8c-fb3e-4675-8f73-8e7916e746cc" (UID: "01c79a8c-fb3e-4675-8f73-8e7916e746cc"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.268723 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/563b20bf-7587-442c-86c5-1cbb179a2bf6-config-data" (OuterVolumeSpecName: "config-data") pod "563b20bf-7587-442c-86c5-1cbb179a2bf6" (UID: "563b20bf-7587-442c-86c5-1cbb179a2bf6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.271352 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/563b20bf-7587-442c-86c5-1cbb179a2bf6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "563b20bf-7587-442c-86c5-1cbb179a2bf6" (UID: "563b20bf-7587-442c-86c5-1cbb179a2bf6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.281307 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "fdd4645f-8430-40ad-9539-663a01c74c13" (UID: "fdd4645f-8430-40ad-9539-663a01c74c13"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.284860 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-config-data" (OuterVolumeSpecName: "config-data") pod "fdd4645f-8430-40ad-9539-663a01c74c13" (UID: "fdd4645f-8430-40ad-9539-663a01c74c13"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.287131 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "fdd4645f-8430-40ad-9539-663a01c74c13" (UID: "fdd4645f-8430-40ad-9539-663a01c74c13"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.293739 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "01c79a8c-fb3e-4675-8f73-8e7916e746cc" (UID: "01c79a8c-fb3e-4675-8f73-8e7916e746cc"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.303704 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-config-data" (OuterVolumeSpecName: "config-data") pod "01c79a8c-fb3e-4675-8f73-8e7916e746cc" (UID: "01c79a8c-fb3e-4675-8f73-8e7916e746cc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309287 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309320 4783 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309424 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkfzl\" (UniqueName: \"kubernetes.io/projected/01c79a8c-fb3e-4675-8f73-8e7916e746cc-kube-api-access-xkfzl\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309439 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309432 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "01c79a8c-fb3e-4675-8f73-8e7916e746cc" (UID: "01c79a8c-fb3e-4675-8f73-8e7916e746cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309453 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309515 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309527 4783 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309539 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frktz\" (UniqueName: \"kubernetes.io/projected/fdd4645f-8430-40ad-9539-663a01c74c13-kube-api-access-frktz\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309550 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvfw7\" (UniqueName: \"kubernetes.io/projected/563b20bf-7587-442c-86c5-1cbb179a2bf6-kube-api-access-dvfw7\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309561 4783 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01c79a8c-fb3e-4675-8f73-8e7916e746cc-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309571 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309582 4783 reconciler_common.go:293] "Volume detached for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309603 4783 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fdd4645f-8430-40ad-9539-663a01c74c13-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309611 4783 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/01c79a8c-fb3e-4675-8f73-8e7916e746cc-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309620 4783 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309629 4783 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309637 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/563b20bf-7587-442c-86c5-1cbb179a2bf6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.309647 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/563b20bf-7587-442c-86c5-1cbb179a2bf6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.411330 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01c79a8c-fb3e-4675-8f73-8e7916e746cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.789607 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="edaa8451ea5ff38f645e9552be3529f3e61b692d69e710a73e7a302ef19b35cd" exitCode=0 Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.789688 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"edaa8451ea5ff38f645e9552be3529f3e61b692d69e710a73e7a302ef19b35cd"} Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.789948 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740"} Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.789968 4783 scope.go:117] "RemoveContainer" containerID="a7cb9b97e41dd6ebeb85ae9ef9261c774b500007acfc3bb801dd53ce003a26e8" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.797453 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"01c79a8c-fb3e-4675-8f73-8e7916e746cc","Type":"ContainerDied","Data":"208d11a19d6ff72d9456dde76c744b6abb4da2bff39922e8b3720512d28531ab"} Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.797515 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.799091 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"563b20bf-7587-442c-86c5-1cbb179a2bf6","Type":"ContainerDied","Data":"355342254b8f408d1a01efe785631003dd502026b7bb55b8dd41f5156e76c055"} Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.799141 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.802281 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-84fcfd7bf5-qmzxl" event={"ID":"fdd4645f-8430-40ad-9539-663a01c74c13","Type":"ContainerDied","Data":"61dda0969a0bd1ae539d326392eba358f098cad90af63c593dc98f7020813d11"} Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.802357 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-84fcfd7bf5-qmzxl" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.854788 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="164c5743-32f5-4347-9c9d-20d28f1f2dce" path="/var/lib/kubelet/pods/164c5743-32f5-4347-9c9d-20d28f1f2dce/volumes" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.855682 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a19af6c-8b2e-41f3-ac68-012bd49e514b" path="/var/lib/kubelet/pods/2a19af6c-8b2e-41f3-ac68-012bd49e514b/volumes" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.856493 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" path="/var/lib/kubelet/pods/3d12c07d-16a5-47c8-94af-fc04a4c0ba9d/volumes" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.858076 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="990cfb5a-6508-4344-9df7-391f55a70bd8" path="/var/lib/kubelet/pods/990cfb5a-6508-4344-9df7-391f55a70bd8/volumes" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.861464 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b901a1db-0fb0-4d58-be99-fdfd812683e6" path="/var/lib/kubelet/pods/b901a1db-0fb0-4d58-be99-fdfd812683e6/volumes" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.907915 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.922114 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.928327 4783 scope.go:117] "RemoveContainer" containerID="28abb173cd857cbb9d4a1c5a005edf6a644ce45b75ff79242a7e087b0bfe444f" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.928600 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.935365 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.941057 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-84fcfd7bf5-qmzxl"] Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.946186 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-84fcfd7bf5-qmzxl"] Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.950560 4783 scope.go:117] "RemoveContainer" 
containerID="2b3fa9c5a1e21601eec7e4e1e10d5c915c51e4786e66caa45491f931fcfc9654" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.971876 4783 scope.go:117] "RemoveContainer" containerID="2a75fa3505609d0bdb1e679052bbfa1b815931211ba773373b4cf8cf6a6f8ced" Sep 30 13:58:38 crc kubenswrapper[4783]: I0930 13:58:38.991705 4783 scope.go:117] "RemoveContainer" containerID="cecbe25798bb861b6def416ee9dd34eed22000629d5be8de71c4a766d16e7b32" Sep 30 13:58:39 crc kubenswrapper[4783]: I0930 13:58:39.012416 4783 scope.go:117] "RemoveContainer" containerID="263f4dbca5cc00d91bb54ca88f80bb3b726ad47d96c37c7482687ea14abe3852" Sep 30 13:58:39 crc kubenswrapper[4783]: I0930 13:58:39.029478 4783 scope.go:117] "RemoveContainer" containerID="83c83549440f0e945a07ebe3f4406f83362c2602f75efd510a99b82f7f1c32fb" Sep 30 13:58:39 crc kubenswrapper[4783]: E0930 13:58:39.449280 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 13:58:39 crc kubenswrapper[4783]: E0930 13:58:39.449736 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 13:58:39 crc kubenswrapper[4783]: E0930 13:58:39.450141 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 13:58:39 crc kubenswrapper[4783]: E0930 13:58:39.450197 4783 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovsdb-server" Sep 30 13:58:39 crc kubenswrapper[4783]: E0930 13:58:39.451365 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 13:58:39 crc kubenswrapper[4783]: E0930 13:58:39.454160 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 13:58:39 crc kubenswrapper[4783]: E0930 13:58:39.459638 4783 log.go:32] 
"ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 13:58:39 crc kubenswrapper[4783]: E0930 13:58:39.459721 4783 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovs-vswitchd" Sep 30 13:58:40 crc kubenswrapper[4783]: I0930 13:58:40.856036 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" path="/var/lib/kubelet/pods/01c79a8c-fb3e-4675-8f73-8e7916e746cc/volumes" Sep 30 13:58:40 crc kubenswrapper[4783]: I0930 13:58:40.857275 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="563b20bf-7587-442c-86c5-1cbb179a2bf6" path="/var/lib/kubelet/pods/563b20bf-7587-442c-86c5-1cbb179a2bf6/volumes" Sep 30 13:58:40 crc kubenswrapper[4783]: I0930 13:58:40.858095 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdd4645f-8430-40ad-9539-663a01c74c13" path="/var/lib/kubelet/pods/fdd4645f-8430-40ad-9539-663a01c74c13/volumes" Sep 30 13:58:40 crc kubenswrapper[4783]: I0930 13:58:40.964445 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Sep 30 13:58:40 crc kubenswrapper[4783]: I0930 13:58:40.964703 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="fd739820-88fe-4dc4-9ff6-1dcbee461751" containerName="memcached" containerID="cri-o://f82c79b1e83d1c85b4c136d548ea314373472d27f8b04f8100616ab361665eef" gracePeriod=30 Sep 30 13:58:40 crc kubenswrapper[4783]: I0930 13:58:40.987168 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 13:58:40 crc kubenswrapper[4783]: I0930 13:58:40.987452 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d0280c83-c3f5-45d6-abb4-df04dbeed8e3" containerName="nova-scheduler-scheduler" containerID="cri-o://c39ea6e468f1e1a6206c2bea75db09565a04fd0520a06ccc04f611cbaa92fa23" gracePeriod=30 Sep 30 13:58:41 crc kubenswrapper[4783]: I0930 13:58:41.837977 4783 generic.go:334] "Generic (PLEG): container finished" podID="fd739820-88fe-4dc4-9ff6-1dcbee461751" containerID="f82c79b1e83d1c85b4c136d548ea314373472d27f8b04f8100616ab361665eef" exitCode=0 Sep 30 13:58:41 crc kubenswrapper[4783]: I0930 13:58:41.838083 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"fd739820-88fe-4dc4-9ff6-1dcbee461751","Type":"ContainerDied","Data":"f82c79b1e83d1c85b4c136d548ea314373472d27f8b04f8100616ab361665eef"} Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.644566 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.674418 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd739820-88fe-4dc4-9ff6-1dcbee461751-combined-ca-bundle\") pod \"fd739820-88fe-4dc4-9ff6-1dcbee461751\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.674468 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fd739820-88fe-4dc4-9ff6-1dcbee461751-kolla-config\") pod \"fd739820-88fe-4dc4-9ff6-1dcbee461751\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.674491 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd739820-88fe-4dc4-9ff6-1dcbee461751-config-data\") pod \"fd739820-88fe-4dc4-9ff6-1dcbee461751\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.674541 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd739820-88fe-4dc4-9ff6-1dcbee461751-memcached-tls-certs\") pod \"fd739820-88fe-4dc4-9ff6-1dcbee461751\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.674632 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pb5kd\" (UniqueName: \"kubernetes.io/projected/fd739820-88fe-4dc4-9ff6-1dcbee461751-kube-api-access-pb5kd\") pod \"fd739820-88fe-4dc4-9ff6-1dcbee461751\" (UID: \"fd739820-88fe-4dc4-9ff6-1dcbee461751\") " Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.676435 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd739820-88fe-4dc4-9ff6-1dcbee461751-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "fd739820-88fe-4dc4-9ff6-1dcbee461751" (UID: "fd739820-88fe-4dc4-9ff6-1dcbee461751"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.678080 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd739820-88fe-4dc4-9ff6-1dcbee461751-config-data" (OuterVolumeSpecName: "config-data") pod "fd739820-88fe-4dc4-9ff6-1dcbee461751" (UID: "fd739820-88fe-4dc4-9ff6-1dcbee461751"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.681866 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd739820-88fe-4dc4-9ff6-1dcbee461751-kube-api-access-pb5kd" (OuterVolumeSpecName: "kube-api-access-pb5kd") pod "fd739820-88fe-4dc4-9ff6-1dcbee461751" (UID: "fd739820-88fe-4dc4-9ff6-1dcbee461751"). InnerVolumeSpecName "kube-api-access-pb5kd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.706932 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd739820-88fe-4dc4-9ff6-1dcbee461751-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd739820-88fe-4dc4-9ff6-1dcbee461751" (UID: "fd739820-88fe-4dc4-9ff6-1dcbee461751"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.726584 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd739820-88fe-4dc4-9ff6-1dcbee461751-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "fd739820-88fe-4dc4-9ff6-1dcbee461751" (UID: "fd739820-88fe-4dc4-9ff6-1dcbee461751"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.776386 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pb5kd\" (UniqueName: \"kubernetes.io/projected/fd739820-88fe-4dc4-9ff6-1dcbee461751-kube-api-access-pb5kd\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.776424 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd739820-88fe-4dc4-9ff6-1dcbee461751-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.776477 4783 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fd739820-88fe-4dc4-9ff6-1dcbee461751-kolla-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.776491 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd739820-88fe-4dc4-9ff6-1dcbee461751-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.776502 4783 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd739820-88fe-4dc4-9ff6-1dcbee461751-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.849035 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.854984 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"fd739820-88fe-4dc4-9ff6-1dcbee461751","Type":"ContainerDied","Data":"4088d7b5981065d01c1247da2eab6e658ebe3646d5eac21da832c69a1e4dec67"} Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.855027 4783 scope.go:117] "RemoveContainer" containerID="f82c79b1e83d1c85b4c136d548ea314373472d27f8b04f8100616ab361665eef" Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.885215 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Sep 30 13:58:42 crc kubenswrapper[4783]: I0930 13:58:42.890013 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Sep 30 13:58:43 crc kubenswrapper[4783]: E0930 13:58:43.431173 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c39ea6e468f1e1a6206c2bea75db09565a04fd0520a06ccc04f611cbaa92fa23" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 13:58:43 crc kubenswrapper[4783]: E0930 13:58:43.433091 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c39ea6e468f1e1a6206c2bea75db09565a04fd0520a06ccc04f611cbaa92fa23" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 13:58:43 crc kubenswrapper[4783]: E0930 13:58:43.434681 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c39ea6e468f1e1a6206c2bea75db09565a04fd0520a06ccc04f611cbaa92fa23" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 13:58:43 crc kubenswrapper[4783]: E0930 13:58:43.434760 4783 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="d0280c83-c3f5-45d6-abb4-df04dbeed8e3" containerName="nova-scheduler-scheduler" Sep 30 13:58:44 crc kubenswrapper[4783]: E0930 13:58:44.448674 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 13:58:44 crc kubenswrapper[4783]: E0930 13:58:44.449298 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 13:58:44 crc kubenswrapper[4783]: E0930 13:58:44.449706 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 30 13:58:44 crc kubenswrapper[4783]: E0930 13:58:44.449736 4783 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovsdb-server" Sep 30 13:58:44 crc kubenswrapper[4783]: E0930 13:58:44.450374 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 13:58:44 crc kubenswrapper[4783]: E0930 13:58:44.451474 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 13:58:44 crc kubenswrapper[4783]: E0930 13:58:44.452528 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 30 13:58:44 crc kubenswrapper[4783]: E0930 13:58:44.452558 4783 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovs-vswitchd" Sep 30 13:58:44 crc kubenswrapper[4783]: I0930 13:58:44.858855 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd739820-88fe-4dc4-9ff6-1dcbee461751" path="/var/lib/kubelet/pods/fd739820-88fe-4dc4-9ff6-1dcbee461751/volumes" Sep 30 13:58:44 crc kubenswrapper[4783]: I0930 13:58:44.875129 4783 generic.go:334] "Generic (PLEG): container finished" podID="aea997d7-7510-42b0-91f8-07592048868f" containerID="082fb383f645276bbaa075b85be6d49c88105cfa13629a7bcfb3725d2695cb56" exitCode=0 Sep 30 13:58:44 crc kubenswrapper[4783]: I0930 13:58:44.875182 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c9bc45547-5grb6" event={"ID":"aea997d7-7510-42b0-91f8-07592048868f","Type":"ContainerDied","Data":"082fb383f645276bbaa075b85be6d49c88105cfa13629a7bcfb3725d2695cb56"} Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.134196 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.218265 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-ovndb-tls-certs\") pod \"aea997d7-7510-42b0-91f8-07592048868f\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.218325 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-httpd-config\") pod \"aea997d7-7510-42b0-91f8-07592048868f\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.218363 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-config\") pod \"aea997d7-7510-42b0-91f8-07592048868f\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.218454 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-internal-tls-certs\") pod \"aea997d7-7510-42b0-91f8-07592048868f\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.218516 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-combined-ca-bundle\") pod \"aea997d7-7510-42b0-91f8-07592048868f\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.218563 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-public-tls-certs\") pod \"aea997d7-7510-42b0-91f8-07592048868f\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.218596 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hvf7\" (UniqueName: \"kubernetes.io/projected/aea997d7-7510-42b0-91f8-07592048868f-kube-api-access-7hvf7\") pod \"aea997d7-7510-42b0-91f8-07592048868f\" (UID: \"aea997d7-7510-42b0-91f8-07592048868f\") " Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.223456 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "aea997d7-7510-42b0-91f8-07592048868f" (UID: "aea997d7-7510-42b0-91f8-07592048868f"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.226041 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aea997d7-7510-42b0-91f8-07592048868f-kube-api-access-7hvf7" (OuterVolumeSpecName: "kube-api-access-7hvf7") pod "aea997d7-7510-42b0-91f8-07592048868f" (UID: "aea997d7-7510-42b0-91f8-07592048868f"). InnerVolumeSpecName "kube-api-access-7hvf7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.265518 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-config" (OuterVolumeSpecName: "config") pod "aea997d7-7510-42b0-91f8-07592048868f" (UID: "aea997d7-7510-42b0-91f8-07592048868f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.266123 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "aea997d7-7510-42b0-91f8-07592048868f" (UID: "aea997d7-7510-42b0-91f8-07592048868f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.271345 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aea997d7-7510-42b0-91f8-07592048868f" (UID: "aea997d7-7510-42b0-91f8-07592048868f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.272385 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "aea997d7-7510-42b0-91f8-07592048868f" (UID: "aea997d7-7510-42b0-91f8-07592048868f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.285982 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "aea997d7-7510-42b0-91f8-07592048868f" (UID: "aea997d7-7510-42b0-91f8-07592048868f"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.320063 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.320100 4783 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.324753 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hvf7\" (UniqueName: \"kubernetes.io/projected/aea997d7-7510-42b0-91f8-07592048868f-kube-api-access-7hvf7\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.324769 4783 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.324817 4783 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.324832 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-config\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.324843 4783 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aea997d7-7510-42b0-91f8-07592048868f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.894833 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c9bc45547-5grb6" event={"ID":"aea997d7-7510-42b0-91f8-07592048868f","Type":"ContainerDied","Data":"f90d7ff480b3466d639d9fd3d1a265fff6e38d607057f215ac9fe6ec3245a129"} Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.895211 4783 scope.go:117] "RemoveContainer" containerID="a27d904ac13e2388585d777e5ccadfdbee717dc617233684f8f3e68da41b3116" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.895508 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7c9bc45547-5grb6" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.921270 4783 scope.go:117] "RemoveContainer" containerID="082fb383f645276bbaa075b85be6d49c88105cfa13629a7bcfb3725d2695cb56" Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.933741 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7c9bc45547-5grb6"] Sep 30 13:58:45 crc kubenswrapper[4783]: I0930 13:58:45.939589 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7c9bc45547-5grb6"] Sep 30 13:58:46 crc kubenswrapper[4783]: I0930 13:58:46.856081 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aea997d7-7510-42b0-91f8-07592048868f" path="/var/lib/kubelet/pods/aea997d7-7510-42b0-91f8-07592048868f/volumes" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.484446 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.485683 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.487012 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.487052 4783 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="2a09ae34-f770-404f-b7ec-1fd3b630bf4c" containerName="galera" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.787947 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-86g9g"] Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788345 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990cfb5a-6508-4344-9df7-391f55a70bd8" containerName="nova-api-log" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788361 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="990cfb5a-6508-4344-9df7-391f55a70bd8" containerName="nova-api-log" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788378 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b901a1db-0fb0-4d58-be99-fdfd812683e6" containerName="rabbitmq" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788386 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="b901a1db-0fb0-4d58-be99-fdfd812683e6" containerName="rabbitmq" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788396 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a19af6c-8b2e-41f3-ac68-012bd49e514b" 
containerName="barbican-worker" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788403 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a19af6c-8b2e-41f3-ac68-012bd49e514b" containerName="barbican-worker" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788414 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="ceilometer-central-agent" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788421 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="ceilometer-central-agent" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788438 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b901a1db-0fb0-4d58-be99-fdfd812683e6" containerName="setup-container" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788445 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="b901a1db-0fb0-4d58-be99-fdfd812683e6" containerName="setup-container" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788456 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b97c668-20f4-48a9-a8ef-f5878e6aa23f" containerName="placement-api" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788463 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b97c668-20f4-48a9-a8ef-f5878e6aa23f" containerName="placement-api" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788478 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd739820-88fe-4dc4-9ff6-1dcbee461751" containerName="memcached" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788484 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd739820-88fe-4dc4-9ff6-1dcbee461751" containerName="memcached" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788495 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36fb1123-03da-4b8c-b9b1-39caa412db70" containerName="mariadb-account-delete" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788501 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="36fb1123-03da-4b8c-b9b1-39caa412db70" containerName="mariadb-account-delete" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788509 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdd4645f-8430-40ad-9539-663a01c74c13" containerName="keystone-api" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788517 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdd4645f-8430-40ad-9539-663a01c74c13" containerName="keystone-api" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788532 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f157470-67d0-452c-9959-a452400c02d7" containerName="nova-cell0-conductor-conductor" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788542 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f157470-67d0-452c-9959-a452400c02d7" containerName="nova-cell0-conductor-conductor" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788556 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f381dd59-999a-4cd2-8dc1-d0faea63df2c" containerName="kube-state-metrics" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788564 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="f381dd59-999a-4cd2-8dc1-d0faea63df2c" containerName="kube-state-metrics" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788577 4783 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="2a19af6c-8b2e-41f3-ac68-012bd49e514b" containerName="barbican-worker-log" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788584 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a19af6c-8b2e-41f3-ac68-012bd49e514b" containerName="barbican-worker-log" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788595 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="050b08a6-64b8-4237-acfc-37711efa8361" containerName="glance-log" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788602 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="050b08a6-64b8-4237-acfc-37711efa8361" containerName="glance-log" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788614 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aea997d7-7510-42b0-91f8-07592048868f" containerName="neutron-api" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788621 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="aea997d7-7510-42b0-91f8-07592048868f" containerName="neutron-api" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788633 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="ceilometer-notification-agent" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788640 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="ceilometer-notification-agent" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788654 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f741556-230b-409c-b9bd-d0dc1abbcd77" containerName="barbican-api-log" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788661 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f741556-230b-409c-b9bd-d0dc1abbcd77" containerName="barbican-api-log" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788670 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="284aafcd-4081-400f-a1c3-9992b3557fc1" containerName="dnsmasq-dns" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788677 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="284aafcd-4081-400f-a1c3-9992b3557fc1" containerName="dnsmasq-dns" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788690 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="957739b9-90a0-43bf-a5a4-9558993b660f" containerName="mariadb-account-delete" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788698 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="957739b9-90a0-43bf-a5a4-9558993b660f" containerName="mariadb-account-delete" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788711 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fece54c-da0b-4cc0-b20d-b442b2fa73ed" containerName="cinder-api-log" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788718 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fece54c-da0b-4cc0-b20d-b442b2fa73ed" containerName="cinder-api-log" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788732 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de5783b8-dd5d-4570-ada8-5b1775a75813" containerName="nova-metadata-log" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788739 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="de5783b8-dd5d-4570-ada8-5b1775a75813" containerName="nova-metadata-log" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 
13:58:47.788750 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aea997d7-7510-42b0-91f8-07592048868f" containerName="neutron-httpd" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788758 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="aea997d7-7510-42b0-91f8-07592048868f" containerName="neutron-httpd" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788769 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccf790ec-b4f7-4734-92a0-929ed51c08ec" containerName="proxy-httpd" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788776 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccf790ec-b4f7-4734-92a0-929ed51c08ec" containerName="proxy-httpd" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788788 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="proxy-httpd" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788794 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="proxy-httpd" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788802 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="sg-core" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788810 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="sg-core" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788818 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccf790ec-b4f7-4734-92a0-929ed51c08ec" containerName="proxy-server" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788825 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccf790ec-b4f7-4734-92a0-929ed51c08ec" containerName="proxy-server" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788840 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de5783b8-dd5d-4570-ada8-5b1775a75813" containerName="nova-metadata-metadata" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788846 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="de5783b8-dd5d-4570-ada8-5b1775a75813" containerName="nova-metadata-metadata" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788859 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e0048e0-a916-434d-abd4-571cec7d4b6a" containerName="openstack-network-exporter" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788866 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e0048e0-a916-434d-abd4-571cec7d4b6a" containerName="openstack-network-exporter" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788876 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" containerName="ovsdbserver-sb" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788883 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" containerName="ovsdbserver-sb" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788896 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" containerName="barbican-keystone-listener" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788903 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" containerName="barbican-keystone-listener" Sep 30 
13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788914 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf" containerName="mariadb-account-delete" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788921 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf" containerName="mariadb-account-delete" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788930 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7ef9466-e9f5-467e-9b43-2b7952e5b479" containerName="ovsdbserver-nb" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788937 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7ef9466-e9f5-467e-9b43-2b7952e5b479" containerName="ovsdbserver-nb" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788947 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e0048e0-a916-434d-abd4-571cec7d4b6a" containerName="ovn-northd" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788954 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e0048e0-a916-434d-abd4-571cec7d4b6a" containerName="ovn-northd" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788968 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f741556-230b-409c-b9bd-d0dc1abbcd77" containerName="barbican-api" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788976 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f741556-230b-409c-b9bd-d0dc1abbcd77" containerName="barbican-api" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.788989 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b97c668-20f4-48a9-a8ef-f5878e6aa23f" containerName="placement-log" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.788995 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b97c668-20f4-48a9-a8ef-f5878e6aa23f" containerName="placement-log" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789009 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="563b20bf-7587-442c-86c5-1cbb179a2bf6" containerName="nova-cell1-conductor-conductor" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789016 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="563b20bf-7587-442c-86c5-1cbb179a2bf6" containerName="nova-cell1-conductor-conductor" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789028 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="164c5743-32f5-4347-9c9d-20d28f1f2dce" containerName="rabbitmq" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789035 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="164c5743-32f5-4347-9c9d-20d28f1f2dce" containerName="rabbitmq" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789047 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2419c631-f6ff-431e-bb3b-2c3285eda678" containerName="glance-log" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789054 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2419c631-f6ff-431e-bb3b-2c3285eda678" containerName="glance-log" Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789066 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1989fc2-d0ba-49ce-a488-589eaaaecb58" containerName="mysql-bootstrap" Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789074 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1989fc2-d0ba-49ce-a488-589eaaaecb58" 
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789074 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1989fc2-d0ba-49ce-a488-589eaaaecb58" containerName="mysql-bootstrap"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789085 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="050b08a6-64b8-4237-acfc-37711efa8361" containerName="glance-httpd"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789092 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="050b08a6-64b8-4237-acfc-37711efa8361" containerName="glance-httpd"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789103 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="634bd6a4-be67-43db-b032-7e083edce6eb" containerName="mariadb-account-delete"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789110 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="634bd6a4-be67-43db-b032-7e083edce6eb" containerName="mariadb-account-delete"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789150 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="284aafcd-4081-400f-a1c3-9992b3557fc1" containerName="init"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789157 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="284aafcd-4081-400f-a1c3-9992b3557fc1" containerName="init"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789166 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1989fc2-d0ba-49ce-a488-589eaaaecb58" containerName="galera"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789173 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1989fc2-d0ba-49ce-a488-589eaaaecb58" containerName="galera"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789185 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="801ddf87-455e-4941-8637-4c2f5da49d41" containerName="openstack-network-exporter"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789192 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="801ddf87-455e-4941-8637-4c2f5da49d41" containerName="openstack-network-exporter"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789203 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" containerName="barbican-keystone-listener-log"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789210 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" containerName="barbican-keystone-listener-log"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789219 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="164c5743-32f5-4347-9c9d-20d28f1f2dce" containerName="setup-container"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789239 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="164c5743-32f5-4347-9c9d-20d28f1f2dce" containerName="setup-container"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789253 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2419c631-f6ff-431e-bb3b-2c3285eda678" containerName="glance-httpd"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789260 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2419c631-f6ff-431e-bb3b-2c3285eda678" containerName="glance-httpd"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789269 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990cfb5a-6508-4344-9df7-391f55a70bd8" containerName="nova-api-api"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789276 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="990cfb5a-6508-4344-9df7-391f55a70bd8" containerName="nova-api-api"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789291 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bc852c2-c59b-4b84-bbfc-c8b62354c66d" containerName="ovn-controller"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789298 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bc852c2-c59b-4b84-bbfc-c8b62354c66d" containerName="ovn-controller"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789309 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" containerName="openstack-network-exporter"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789316 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" containerName="openstack-network-exporter"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789330 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5026d481-7d2b-40cd-8369-17892ed22c77" containerName="mariadb-account-delete"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789338 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5026d481-7d2b-40cd-8369-17892ed22c77" containerName="mariadb-account-delete"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789345 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7ef9466-e9f5-467e-9b43-2b7952e5b479" containerName="openstack-network-exporter"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789352 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7ef9466-e9f5-467e-9b43-2b7952e5b479" containerName="openstack-network-exporter"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789366 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fece54c-da0b-4cc0-b20d-b442b2fa73ed" containerName="cinder-api"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789372 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fece54c-da0b-4cc0-b20d-b442b2fa73ed" containerName="cinder-api"
Sep 30 13:58:47 crc kubenswrapper[4783]: E0930 13:58:47.789384 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d61c8e26-064d-430a-8bb8-4e3c5e192d3a" containerName="nova-cell1-novncproxy-novncproxy"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789391 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d61c8e26-064d-430a-8bb8-4e3c5e192d3a" containerName="nova-cell1-novncproxy-novncproxy"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789600 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="ceilometer-notification-agent"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789620 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fece54c-da0b-4cc0-b20d-b442b2fa73ed" containerName="cinder-api-log"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789631 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="f381dd59-999a-4cd2-8dc1-d0faea63df2c" containerName="kube-state-metrics"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789642 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="aea997d7-7510-42b0-91f8-07592048868f" containerName="neutron-api"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789650 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="ceilometer-central-agent"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789662 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2419c631-f6ff-431e-bb3b-2c3285eda678" containerName="glance-httpd"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789672 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="08ace7d9-55e6-45c8-aea8-e94ab5c3d6bf" containerName="mariadb-account-delete"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789679 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="990cfb5a-6508-4344-9df7-391f55a70bd8" containerName="nova-api-log"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789688 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdd4645f-8430-40ad-9539-663a01c74c13" containerName="keystone-api"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789697 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f741556-230b-409c-b9bd-d0dc1abbcd77" containerName="barbican-api"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789709 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f157470-67d0-452c-9959-a452400c02d7" containerName="nova-cell0-conductor-conductor"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789716 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="957739b9-90a0-43bf-a5a4-9558993b660f" containerName="mariadb-account-delete"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789724 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2419c631-f6ff-431e-bb3b-2c3285eda678" containerName="glance-log"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789734 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="de5783b8-dd5d-4570-ada8-5b1775a75813" containerName="nova-metadata-metadata"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789743 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="634bd6a4-be67-43db-b032-7e083edce6eb" containerName="mariadb-account-delete"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789757 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="5026d481-7d2b-40cd-8369-17892ed22c77" containerName="mariadb-account-delete"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789770 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fece54c-da0b-4cc0-b20d-b442b2fa73ed" containerName="cinder-api"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789782 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e0048e0-a916-434d-abd4-571cec7d4b6a" containerName="openstack-network-exporter"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789791 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="b901a1db-0fb0-4d58-be99-fdfd812683e6" containerName="rabbitmq"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789804 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccf790ec-b4f7-4734-92a0-929ed51c08ec" containerName="proxy-httpd"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789812 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="164c5743-32f5-4347-9c9d-20d28f1f2dce" containerName="rabbitmq"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789822 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e0048e0-a916-434d-abd4-571cec7d4b6a" containerName="ovn-northd"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789832 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b97c668-20f4-48a9-a8ef-f5878e6aa23f" containerName="placement-api"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789841 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a19af6c-8b2e-41f3-ac68-012bd49e514b" containerName="barbican-worker-log"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789850 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a19af6c-8b2e-41f3-ac68-012bd49e514b" containerName="barbican-worker"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789861 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7ef9466-e9f5-467e-9b43-2b7952e5b479" containerName="ovsdbserver-nb"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789871 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1989fc2-d0ba-49ce-a488-589eaaaecb58" containerName="galera"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789883 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="36fb1123-03da-4b8c-b9b1-39caa412db70" containerName="mariadb-account-delete"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789894 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd739820-88fe-4dc4-9ff6-1dcbee461751" containerName="memcached"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789903 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b97c668-20f4-48a9-a8ef-f5878e6aa23f" containerName="placement-log"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789915 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="050b08a6-64b8-4237-acfc-37711efa8361" containerName="glance-log"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789925 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bc852c2-c59b-4b84-bbfc-c8b62354c66d" containerName="ovn-controller"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789935 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d61c8e26-064d-430a-8bb8-4e3c5e192d3a" containerName="nova-cell1-novncproxy-novncproxy"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789947 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" containerName="ovsdbserver-sb"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789955 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="801ddf87-455e-4941-8637-4c2f5da49d41" containerName="openstack-network-exporter"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789964 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="284aafcd-4081-400f-a1c3-9992b3557fc1" containerName="dnsmasq-dns"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789975 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="990cfb5a-6508-4344-9df7-391f55a70bd8" containerName="nova-api-api"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789986 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7ef9466-e9f5-467e-9b43-2b7952e5b479" containerName="openstack-network-exporter"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.789994 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="563b20bf-7587-442c-86c5-1cbb179a2bf6" containerName="nova-cell1-conductor-conductor"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.790003 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="proxy-httpd"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.790014 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="c707a7c5-54fa-4430-8bbe-ac8eebbb0a59" containerName="openstack-network-exporter"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.790026 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccf790ec-b4f7-4734-92a0-929ed51c08ec" containerName="proxy-server"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.790037 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="de5783b8-dd5d-4570-ada8-5b1775a75813" containerName="nova-metadata-log"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.790047 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="aea997d7-7510-42b0-91f8-07592048868f" containerName="neutron-httpd"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.790054 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="050b08a6-64b8-4237-acfc-37711efa8361" containerName="glance-httpd"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.790062 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" containerName="barbican-keystone-listener-log"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.790071 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f741556-230b-409c-b9bd-d0dc1abbcd77" containerName="barbican-api-log"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.790085 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="01c79a8c-fb3e-4675-8f73-8e7916e746cc" containerName="sg-core"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.790099 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d12c07d-16a5-47c8-94af-fc04a4c0ba9d" containerName="barbican-keystone-listener"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.791535 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.802674 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-86g9g"]
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.867298 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a99ba51-9d75-4181-b157-95c6ddb615f2-catalog-content\") pod \"certified-operators-86g9g\" (UID: \"7a99ba51-9d75-4181-b157-95c6ddb615f2\") " pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.867703 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm6qc\" (UniqueName: \"kubernetes.io/projected/7a99ba51-9d75-4181-b157-95c6ddb615f2-kube-api-access-pm6qc\") pod \"certified-operators-86g9g\" (UID: \"7a99ba51-9d75-4181-b157-95c6ddb615f2\") " pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.867758 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a99ba51-9d75-4181-b157-95c6ddb615f2-utilities\") pod \"certified-operators-86g9g\" (UID: \"7a99ba51-9d75-4181-b157-95c6ddb615f2\") " pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.922924 4783 generic.go:334] "Generic (PLEG): container finished" podID="d0280c83-c3f5-45d6-abb4-df04dbeed8e3" containerID="c39ea6e468f1e1a6206c2bea75db09565a04fd0520a06ccc04f611cbaa92fa23" exitCode=0
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.922974 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d0280c83-c3f5-45d6-abb4-df04dbeed8e3","Type":"ContainerDied","Data":"c39ea6e468f1e1a6206c2bea75db09565a04fd0520a06ccc04f611cbaa92fa23"}
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.969605 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm6qc\" (UniqueName: \"kubernetes.io/projected/7a99ba51-9d75-4181-b157-95c6ddb615f2-kube-api-access-pm6qc\") pod \"certified-operators-86g9g\" (UID: \"7a99ba51-9d75-4181-b157-95c6ddb615f2\") " pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.969683 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a99ba51-9d75-4181-b157-95c6ddb615f2-utilities\") pod \"certified-operators-86g9g\" (UID: \"7a99ba51-9d75-4181-b157-95c6ddb615f2\") " pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.970031 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a99ba51-9d75-4181-b157-95c6ddb615f2-catalog-content\") pod \"certified-operators-86g9g\" (UID: \"7a99ba51-9d75-4181-b157-95c6ddb615f2\") " pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.970372 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a99ba51-9d75-4181-b157-95c6ddb615f2-utilities\") pod \"certified-operators-86g9g\" (UID: \"7a99ba51-9d75-4181-b157-95c6ddb615f2\") " pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.970682 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a99ba51-9d75-4181-b157-95c6ddb615f2-catalog-content\") pod \"certified-operators-86g9g\" (UID: \"7a99ba51-9d75-4181-b157-95c6ddb615f2\") " pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:47 crc kubenswrapper[4783]: I0930 13:58:47.994268 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm6qc\" (UniqueName: \"kubernetes.io/projected/7a99ba51-9d75-4181-b157-95c6ddb615f2-kube-api-access-pm6qc\") pod \"certified-operators-86g9g\" (UID: \"7a99ba51-9d75-4181-b157-95c6ddb615f2\") " pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.109278 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.209513 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.276809 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cspcg\" (UniqueName: \"kubernetes.io/projected/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-kube-api-access-cspcg\") pod \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\" (UID: \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\") "
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.277075 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-combined-ca-bundle\") pod \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\" (UID: \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\") "
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.277191 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-config-data\") pod \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\" (UID: \"d0280c83-c3f5-45d6-abb4-df04dbeed8e3\") "
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.283388 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-kube-api-access-cspcg" (OuterVolumeSpecName: "kube-api-access-cspcg") pod "d0280c83-c3f5-45d6-abb4-df04dbeed8e3" (UID: "d0280c83-c3f5-45d6-abb4-df04dbeed8e3"). InnerVolumeSpecName "kube-api-access-cspcg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.343587 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-config-data" (OuterVolumeSpecName: "config-data") pod "d0280c83-c3f5-45d6-abb4-df04dbeed8e3" (UID: "d0280c83-c3f5-45d6-abb4-df04dbeed8e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.350510 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0280c83-c3f5-45d6-abb4-df04dbeed8e3" (UID: "d0280c83-c3f5-45d6-abb4-df04dbeed8e3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.379142 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cspcg\" (UniqueName: \"kubernetes.io/projected/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-kube-api-access-cspcg\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.379172 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.379181 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0280c83-c3f5-45d6-abb4-df04dbeed8e3-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.649889 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-86g9g"]
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.935380 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-86g9g" event={"ID":"7a99ba51-9d75-4181-b157-95c6ddb615f2","Type":"ContainerStarted","Data":"b8e417a2dd312c12a6209ffc87a73b7f8459f3feca5db79a9eb0df543b32571a"}
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.935546 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-86g9g" event={"ID":"7a99ba51-9d75-4181-b157-95c6ddb615f2","Type":"ContainerStarted","Data":"c98d0cc0500d772156d4b6df51ad3a0d05ce7bf89a93e925597878178c074fd7"}
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.937275 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d0280c83-c3f5-45d6-abb4-df04dbeed8e3","Type":"ContainerDied","Data":"eb422180d6c2172a15a7791d0b32720cc74bfe22376ccdf6707ae39cbdd2098d"}
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.937317 4783 scope.go:117] "RemoveContainer" containerID="c39ea6e468f1e1a6206c2bea75db09565a04fd0520a06ccc04f611cbaa92fa23"
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.937470 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.971425 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 13:58:48 crc kubenswrapper[4783]: I0930 13:58:48.977278 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 13:58:49 crc kubenswrapper[4783]: E0930 13:58:49.448516 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 13:58:49 crc kubenswrapper[4783]: E0930 13:58:49.449410 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 13:58:49 crc kubenswrapper[4783]: E0930 13:58:49.450053 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 13:58:49 crc kubenswrapper[4783]: E0930 13:58:49.450123 4783 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovsdb-server"
Sep 30 13:58:49 crc kubenswrapper[4783]: E0930 13:58:49.450726 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Sep 30 13:58:49 crc kubenswrapper[4783]: E0930 13:58:49.452472 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Sep 30 13:58:49 crc kubenswrapper[4783]: E0930 13:58:49.453780 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Sep 30 13:58:49 crc kubenswrapper[4783]: E0930 13:58:49.453854 4783 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovs-vswitchd"
Sep 30 13:58:49 crc kubenswrapper[4783]: I0930 13:58:49.952529 4783 generic.go:334] "Generic (PLEG): container finished" podID="7a99ba51-9d75-4181-b157-95c6ddb615f2" containerID="b8e417a2dd312c12a6209ffc87a73b7f8459f3feca5db79a9eb0df543b32571a" exitCode=0
Sep 30 13:58:49 crc kubenswrapper[4783]: I0930 13:58:49.952584 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-86g9g" event={"ID":"7a99ba51-9d75-4181-b157-95c6ddb615f2","Type":"ContainerDied","Data":"b8e417a2dd312c12a6209ffc87a73b7f8459f3feca5db79a9eb0df543b32571a"}
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.565070 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.615930 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-config-data-default\") pod \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") "
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.615991 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89n9f\" (UniqueName: \"kubernetes.io/projected/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-kube-api-access-89n9f\") pod \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") "
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.616047 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-secrets\") pod \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") "
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.616072 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-galera-tls-certs\") pod \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") "
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.616096 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-operator-scripts\") pod \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") "
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.616139 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-combined-ca-bundle\") pod \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") "
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.616189 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-kolla-config\") pod \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") "
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.616242 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") "
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.616297 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-config-data-generated\") pod \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\" (UID: \"2a09ae34-f770-404f-b7ec-1fd3b630bf4c\") "
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.617408 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "2a09ae34-f770-404f-b7ec-1fd3b630bf4c" (UID: "2a09ae34-f770-404f-b7ec-1fd3b630bf4c"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.617644 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2a09ae34-f770-404f-b7ec-1fd3b630bf4c" (UID: "2a09ae34-f770-404f-b7ec-1fd3b630bf4c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.617717 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "2a09ae34-f770-404f-b7ec-1fd3b630bf4c" (UID: "2a09ae34-f770-404f-b7ec-1fd3b630bf4c"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.617890 4783 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-config-data-generated\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.618395 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "2a09ae34-f770-404f-b7ec-1fd3b630bf4c" (UID: "2a09ae34-f770-404f-b7ec-1fd3b630bf4c"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.624031 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-secrets" (OuterVolumeSpecName: "secrets") pod "2a09ae34-f770-404f-b7ec-1fd3b630bf4c" (UID: "2a09ae34-f770-404f-b7ec-1fd3b630bf4c"). InnerVolumeSpecName "secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.624214 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-kube-api-access-89n9f" (OuterVolumeSpecName: "kube-api-access-89n9f") pod "2a09ae34-f770-404f-b7ec-1fd3b630bf4c" (UID: "2a09ae34-f770-404f-b7ec-1fd3b630bf4c"). InnerVolumeSpecName "kube-api-access-89n9f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.631744 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "mysql-db") pod "2a09ae34-f770-404f-b7ec-1fd3b630bf4c" (UID: "2a09ae34-f770-404f-b7ec-1fd3b630bf4c"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.654985 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2a09ae34-f770-404f-b7ec-1fd3b630bf4c" (UID: "2a09ae34-f770-404f-b7ec-1fd3b630bf4c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.670647 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "2a09ae34-f770-404f-b7ec-1fd3b630bf4c" (UID: "2a09ae34-f770-404f-b7ec-1fd3b630bf4c"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.718794 4783 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-config-data-default\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.718830 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89n9f\" (UniqueName: \"kubernetes.io/projected/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-kube-api-access-89n9f\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.718842 4783 reconciler_common.go:293] "Volume detached for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-secrets\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.718852 4783 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-galera-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.718860 4783 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-operator-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.718868 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.718876 4783 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2a09ae34-f770-404f-b7ec-1fd3b630bf4c-kolla-config\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.718908 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" "
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.732418 4783 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc"
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.820094 4783 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\""
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.857076 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0280c83-c3f5-45d6-abb4-df04dbeed8e3" path="/var/lib/kubelet/pods/d0280c83-c3f5-45d6-abb4-df04dbeed8e3/volumes"
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.968089 4783 generic.go:334] "Generic (PLEG): container finished" podID="2a09ae34-f770-404f-b7ec-1fd3b630bf4c" containerID="5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982" exitCode=0
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.968144 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2a09ae34-f770-404f-b7ec-1fd3b630bf4c","Type":"ContainerDied","Data":"5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982"}
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.968180 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2a09ae34-f770-404f-b7ec-1fd3b630bf4c","Type":"ContainerDied","Data":"4edfe9588d5b6bcd8205f603bfe106257e8c8e26615234021c5ccdf0643d19cc"}
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.968206 4783 scope.go:117] "RemoveContainer" containerID="5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982"
Sep 30 13:58:50 crc kubenswrapper[4783]: I0930 13:58:50.968369 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Sep 30 13:58:51 crc kubenswrapper[4783]: I0930 13:58:51.006314 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"]
Sep 30 13:58:51 crc kubenswrapper[4783]: I0930 13:58:51.012507 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"]
Sep 30 13:58:51 crc kubenswrapper[4783]: I0930 13:58:51.034604 4783 scope.go:117] "RemoveContainer" containerID="759146106a016f0ffdf2a8c450a2694a39e00cea474b6537ea3f0686d0b0320a"
Sep 30 13:58:51 crc kubenswrapper[4783]: I0930 13:58:51.058844 4783 scope.go:117] "RemoveContainer" containerID="5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982"
Sep 30 13:58:51 crc kubenswrapper[4783]: E0930 13:58:51.059723 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982\": container with ID starting with 5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982 not found: ID does not exist" containerID="5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982"
Sep 30 13:58:51 crc kubenswrapper[4783]: I0930 13:58:51.059765 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982"} err="failed to get container status \"5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982\": rpc error: code = NotFound desc = could not find container \"5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982\": container with ID starting with 5a541fb070bfd9e6649498eddf37a4834ba62e46927dc04fa0bcf9df4e4e9982 not found: ID does not exist"
Sep 30 13:58:51 crc kubenswrapper[4783]: I0930 13:58:51.059792 4783 scope.go:117] "RemoveContainer" containerID="759146106a016f0ffdf2a8c450a2694a39e00cea474b6537ea3f0686d0b0320a"
Sep 30 13:58:51 crc kubenswrapper[4783]: E0930 13:58:51.060201 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"759146106a016f0ffdf2a8c450a2694a39e00cea474b6537ea3f0686d0b0320a\": container with ID starting with 759146106a016f0ffdf2a8c450a2694a39e00cea474b6537ea3f0686d0b0320a not found: ID does not exist" containerID="759146106a016f0ffdf2a8c450a2694a39e00cea474b6537ea3f0686d0b0320a"
Sep 30 13:58:51 crc kubenswrapper[4783]: I0930 13:58:51.060283 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"759146106a016f0ffdf2a8c450a2694a39e00cea474b6537ea3f0686d0b0320a"} err="failed to get container status \"759146106a016f0ffdf2a8c450a2694a39e00cea474b6537ea3f0686d0b0320a\": rpc error: code = NotFound desc = could not find container \"759146106a016f0ffdf2a8c450a2694a39e00cea474b6537ea3f0686d0b0320a\": container with ID starting with 759146106a016f0ffdf2a8c450a2694a39e00cea474b6537ea3f0686d0b0320a not found: ID does not exist"
Sep 30 13:58:51 crc kubenswrapper[4783]: I0930 13:58:51.986286 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-86g9g" event={"ID":"7a99ba51-9d75-4181-b157-95c6ddb615f2","Type":"ContainerStarted","Data":"3b29ce60f48e2f01e452b162430ec742618b59c593f361118cb4043e15e233a7"}
Sep 30 13:58:52 crc kubenswrapper[4783]: I0930 13:58:52.852963 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a09ae34-f770-404f-b7ec-1fd3b630bf4c" path="/var/lib/kubelet/pods/2a09ae34-f770-404f-b7ec-1fd3b630bf4c/volumes"
Sep 30 13:58:53 crc kubenswrapper[4783]: I0930 13:58:53.001294 4783 generic.go:334] "Generic (PLEG): container finished" podID="7a99ba51-9d75-4181-b157-95c6ddb615f2" containerID="3b29ce60f48e2f01e452b162430ec742618b59c593f361118cb4043e15e233a7" exitCode=0
Sep 30 13:58:53 crc kubenswrapper[4783]: I0930 13:58:53.001351 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-86g9g" event={"ID":"7a99ba51-9d75-4181-b157-95c6ddb615f2","Type":"ContainerDied","Data":"3b29ce60f48e2f01e452b162430ec742618b59c593f361118cb4043e15e233a7"}
Sep 30 13:58:54 crc kubenswrapper[4783]: I0930 13:58:54.031732 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-86g9g" event={"ID":"7a99ba51-9d75-4181-b157-95c6ddb615f2","Type":"ContainerStarted","Data":"b5a72290ddc8c27ec0800c8f8746e612bfce0cc801c6b7730182bf6b633703ca"}
Sep 30 13:58:54 crc kubenswrapper[4783]: I0930 13:58:54.062706 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-86g9g" podStartSLOduration=3.431754255 podStartE2EDuration="7.062686658s" podCreationTimestamp="2025-09-30 13:58:47 +0000 UTC" firstStartedPulling="2025-09-30 13:58:49.955256617 +0000 UTC m=+1429.886722924" lastFinishedPulling="2025-09-30 13:58:53.58618901 +0000 UTC m=+1433.517655327" observedRunningTime="2025-09-30 13:58:54.060088034 +0000 UTC m=+1433.991554351" watchObservedRunningTime="2025-09-30 13:58:54.062686658 +0000 UTC m=+1433.994152975"
Sep 30 13:58:54 crc kubenswrapper[4783]: E0930 13:58:54.449054 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 13:58:54 crc kubenswrapper[4783]: E0930 13:58:54.450062 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 13:58:54 crc kubenswrapper[4783]: E0930 13:58:54.451197 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 13:58:54 crc kubenswrapper[4783]: E0930 13:58:54.451296 4783 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovsdb-server"
Sep 30 13:58:54 crc kubenswrapper[4783]: E0930 13:58:54.451192 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Sep 30 13:58:54 crc kubenswrapper[4783]: E0930 13:58:54.453932 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Sep 30 13:58:54 crc kubenswrapper[4783]: E0930 13:58:54.456786 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Sep 30 13:58:54 crc kubenswrapper[4783]: E0930 13:58:54.456853 4783 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovs-vswitchd"
Sep 30 13:58:58 crc kubenswrapper[4783]: I0930 13:58:58.110500 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:58 crc kubenswrapper[4783]: I0930 13:58:58.111317 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:58 crc kubenswrapper[4783]: I0930 13:58:58.157066 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:59 crc kubenswrapper[4783]: I0930 13:58:59.138429 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:58:59 crc kubenswrapper[4783]: I0930 13:58:59.179128 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-86g9g"]
Sep 30 13:58:59 crc kubenswrapper[4783]: E0930 13:58:59.449258 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849 is running failed: container process not found" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Sep 30 13:58:59 crc kubenswrapper[4783]: E0930 13:58:59.449324 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 13:58:59 crc kubenswrapper[4783]: E0930 13:58:59.449658 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849 is running failed: container process not found" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Sep 30 13:58:59 crc kubenswrapper[4783]: E0930 13:58:59.449750 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 13:58:59 crc kubenswrapper[4783]: E0930 13:58:59.450079 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849 is running failed: container process not found" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Sep 30 13:58:59 crc kubenswrapper[4783]: E0930 13:58:59.450107 4783 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovs-vswitchd"
Sep 30 13:58:59 crc kubenswrapper[4783]: E0930 13:58:59.450172 4783 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Sep 30 13:58:59 crc kubenswrapper[4783]: E0930 13:58:59.450268 4783 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-ttc29" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovsdb-server"
Sep 30 13:58:59 crc kubenswrapper[4783]: I0930 13:58:59.949587 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ttc29_61f71f56-b66e-46a2-a0c5-25d0477db0a2/ovs-vswitchd/0.log"
Sep 30 13:58:59 crc kubenswrapper[4783]: I0930 13:58:59.950979 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-ttc29"
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.052028 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-lib\") pod \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") "
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.052107 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5t44\" (UniqueName: \"kubernetes.io/projected/61f71f56-b66e-46a2-a0c5-25d0477db0a2-kube-api-access-l5t44\") pod \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") "
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.052141 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-lib" (OuterVolumeSpecName: "var-lib") pod "61f71f56-b66e-46a2-a0c5-25d0477db0a2" (UID: "61f71f56-b66e-46a2-a0c5-25d0477db0a2"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.052161 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-run\") pod \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") "
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.052258 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61f71f56-b66e-46a2-a0c5-25d0477db0a2-scripts\") pod \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.052319 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-log\") pod \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.052348 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-etc-ovs\") pod \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\" (UID: \"61f71f56-b66e-46a2-a0c5-25d0477db0a2\") " Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.052662 4783 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-lib\") on node \"crc\" DevicePath \"\"" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.052678 4783 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.052701 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "61f71f56-b66e-46a2-a0c5-25d0477db0a2" (UID: "61f71f56-b66e-46a2-a0c5-25d0477db0a2"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.052725 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-log" (OuterVolumeSpecName: "var-log") pod "61f71f56-b66e-46a2-a0c5-25d0477db0a2" (UID: "61f71f56-b66e-46a2-a0c5-25d0477db0a2"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.053911 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61f71f56-b66e-46a2-a0c5-25d0477db0a2-scripts" (OuterVolumeSpecName: "scripts") pod "61f71f56-b66e-46a2-a0c5-25d0477db0a2" (UID: "61f71f56-b66e-46a2-a0c5-25d0477db0a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.057868 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61f71f56-b66e-46a2-a0c5-25d0477db0a2-kube-api-access-l5t44" (OuterVolumeSpecName: "kube-api-access-l5t44") pod "61f71f56-b66e-46a2-a0c5-25d0477db0a2" (UID: "61f71f56-b66e-46a2-a0c5-25d0477db0a2"). InnerVolumeSpecName "kube-api-access-l5t44". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.112726 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ttc29_61f71f56-b66e-46a2-a0c5-25d0477db0a2/ovs-vswitchd/0.log" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.114245 4783 generic.go:334] "Generic (PLEG): container finished" podID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" exitCode=137 Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.114327 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-ttc29" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.114332 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ttc29" event={"ID":"61f71f56-b66e-46a2-a0c5-25d0477db0a2","Type":"ContainerDied","Data":"dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849"} Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.114419 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ttc29" event={"ID":"61f71f56-b66e-46a2-a0c5-25d0477db0a2","Type":"ContainerDied","Data":"7fe8b7a26f633c58303ac99c85d730b53e3e8d3098e0808c7bb981ad69f17e68"} Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.114470 4783 scope.go:117] "RemoveContainer" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.146140 4783 scope.go:117] "RemoveContainer" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.153882 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5t44\" (UniqueName: \"kubernetes.io/projected/61f71f56-b66e-46a2-a0c5-25d0477db0a2-kube-api-access-l5t44\") on node \"crc\" DevicePath \"\"" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.153907 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61f71f56-b66e-46a2-a0c5-25d0477db0a2-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.153917 4783 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-var-log\") on node \"crc\" DevicePath \"\"" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.153925 4783 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/61f71f56-b66e-46a2-a0c5-25d0477db0a2-etc-ovs\") on node \"crc\" DevicePath \"\"" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.156761 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-ttc29"] Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.164437 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-ttc29"] Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.184748 4783 scope.go:117] "RemoveContainer" containerID="bfe780365ea46fb5e1592711f467ca713d3ca1a21a2df2e0b8564b2d0d43f7bc" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.212204 4783 scope.go:117] "RemoveContainer" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" Sep 30 13:59:00 crc kubenswrapper[4783]: E0930 13:59:00.212833 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849\": container with ID starting with dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849 not found: ID does not exist" containerID="dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.212876 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849"} err="failed to get container status \"dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849\": rpc error: code = NotFound desc = could not find container \"dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849\": container with ID starting with dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849 not found: ID does not exist" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.212901 4783 scope.go:117] "RemoveContainer" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" Sep 30 13:59:00 crc kubenswrapper[4783]: E0930 13:59:00.213426 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204\": container with ID starting with ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 not found: ID does not exist" containerID="ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.213473 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204"} err="failed to get container status \"ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204\": rpc error: code = NotFound desc = could not find container \"ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204\": container with ID starting with ec2f367ba3231f09d3142d822a9653d97fa620c074beddfbf2bebd4e37dfc204 not found: ID does not exist" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.213505 4783 scope.go:117] "RemoveContainer" containerID="bfe780365ea46fb5e1592711f467ca713d3ca1a21a2df2e0b8564b2d0d43f7bc" Sep 30 13:59:00 crc kubenswrapper[4783]: E0930 13:59:00.213877 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfe780365ea46fb5e1592711f467ca713d3ca1a21a2df2e0b8564b2d0d43f7bc\": container with ID starting with bfe780365ea46fb5e1592711f467ca713d3ca1a21a2df2e0b8564b2d0d43f7bc not found: ID does not exist" containerID="bfe780365ea46fb5e1592711f467ca713d3ca1a21a2df2e0b8564b2d0d43f7bc" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.213909 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfe780365ea46fb5e1592711f467ca713d3ca1a21a2df2e0b8564b2d0d43f7bc"} err="failed to get container status \"bfe780365ea46fb5e1592711f467ca713d3ca1a21a2df2e0b8564b2d0d43f7bc\": rpc error: code = NotFound desc = could not find container \"bfe780365ea46fb5e1592711f467ca713d3ca1a21a2df2e0b8564b2d0d43f7bc\": container with ID starting with bfe780365ea46fb5e1592711f467ca713d3ca1a21a2df2e0b8564b2d0d43f7bc not found: ID does not exist" Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.754912 4783 util.go:48] "No ready sandbox for pod can be found. 
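The "ContainerStatus from runtime service failed" / "DeleteContainer returned error" pairs above are benign: once the pod is torn down, the CRI runtime reports NotFound, and the kubelet treats "already gone" as a successful removal rather than a failure to retry. A minimal sketch of that idempotent-cleanup pattern, assuming hypothetical stand-in types (this is not the kubelet's actual interface):

package main

import (
	"errors"
	"fmt"
)

// errNotFound stands in for the CRI NotFound status seen in the log
// ("could not find container ...: ID does not exist").
var errNotFound = errors.New("NotFound: ID does not exist")

// containerRuntime is a tiny stand-in for the CRI runtime client.
type containerRuntime interface {
	ContainerStatus(id string) error
	RemoveContainer(id string) error
}

// removeIdempotent mirrors the log's pattern: a NotFound from the runtime
// means the container is already gone, so cleanup counts as done.
func removeIdempotent(rt containerRuntime, id string) error {
	if err := rt.ContainerStatus(id); errors.Is(err, errNotFound) {
		fmt.Printf("container %.12s already gone, nothing to remove\n", id)
		return nil // already deleted by an earlier pass; not an error
	} else if err != nil {
		return err
	}
	if err := rt.RemoveContainer(id); errors.Is(err, errNotFound) {
		return nil // lost a race with a concurrent deletion; still success
	} else if err != nil {
		return err
	}
	return nil
}

// goneRuntime always reports NotFound, like the runtime above after the
// ovn-controller-ovs pod's containers were torn down.
type goneRuntime struct{}

func (goneRuntime) ContainerStatus(string) error { return errNotFound }
func (goneRuntime) RemoveContainer(string) error { return errNotFound }

func main() {
	_ = removeIdempotent(goneRuntime{}, "dcb27e016b7febdf6e8461ef95e368c4d9420a7a8c9d3cc686b707c193709849")
}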
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.769691 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.856778 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" path="/var/lib/kubelet/pods/61f71f56-b66e-46a2-a0c5-25d0477db0a2/volumes"
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.862897 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-cache\") pod \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") "
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.862925 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b1dc1d2a-552d-4400-9d1b-12a3a051c432-etc-machine-id\") pod \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") "
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.862956 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-lock\") pod \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") "
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.862994 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-combined-ca-bundle\") pod \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") "
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.863014 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq6fk\" (UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-kube-api-access-rq6fk\") pod \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") "
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.863042 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-config-data\") pod \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") "
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.863060 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-scripts\") pod \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") "
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.863078 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-config-data-custom\") pod \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") "
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.863093 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b1dc1d2a-552d-4400-9d1b-12a3a051c432-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b1dc1d2a-552d-4400-9d1b-12a3a051c432" (UID: "b1dc1d2a-552d-4400-9d1b-12a3a051c432"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.863134 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift\") pod \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") "
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.863153 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\" (UID: \"3da50d95-fee8-4e78-ad46-c2d8ac95adc2\") "
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.863183 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6kdw\" (UniqueName: \"kubernetes.io/projected/b1dc1d2a-552d-4400-9d1b-12a3a051c432-kube-api-access-k6kdw\") pod \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\" (UID: \"b1dc1d2a-552d-4400-9d1b-12a3a051c432\") "
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.863459 4783 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b1dc1d2a-552d-4400-9d1b-12a3a051c432-etc-machine-id\") on node \"crc\" DevicePath \"\""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.863824 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-lock" (OuterVolumeSpecName: "lock") pod "3da50d95-fee8-4e78-ad46-c2d8ac95adc2" (UID: "3da50d95-fee8-4e78-ad46-c2d8ac95adc2"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.865168 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-cache" (OuterVolumeSpecName: "cache") pod "3da50d95-fee8-4e78-ad46-c2d8ac95adc2" (UID: "3da50d95-fee8-4e78-ad46-c2d8ac95adc2"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.866850 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "swift") pod "3da50d95-fee8-4e78-ad46-c2d8ac95adc2" (UID: "3da50d95-fee8-4e78-ad46-c2d8ac95adc2"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.867008 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1dc1d2a-552d-4400-9d1b-12a3a051c432-kube-api-access-k6kdw" (OuterVolumeSpecName: "kube-api-access-k6kdw") pod "b1dc1d2a-552d-4400-9d1b-12a3a051c432" (UID: "b1dc1d2a-552d-4400-9d1b-12a3a051c432"). InnerVolumeSpecName "kube-api-access-k6kdw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.867069 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-scripts" (OuterVolumeSpecName: "scripts") pod "b1dc1d2a-552d-4400-9d1b-12a3a051c432" (UID: "b1dc1d2a-552d-4400-9d1b-12a3a051c432"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.867142 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-kube-api-access-rq6fk" (OuterVolumeSpecName: "kube-api-access-rq6fk") pod "3da50d95-fee8-4e78-ad46-c2d8ac95adc2" (UID: "3da50d95-fee8-4e78-ad46-c2d8ac95adc2"). InnerVolumeSpecName "kube-api-access-rq6fk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.867284 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b1dc1d2a-552d-4400-9d1b-12a3a051c432" (UID: "b1dc1d2a-552d-4400-9d1b-12a3a051c432"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.867287 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "3da50d95-fee8-4e78-ad46-c2d8ac95adc2" (UID: "3da50d95-fee8-4e78-ad46-c2d8ac95adc2"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.903563 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1dc1d2a-552d-4400-9d1b-12a3a051c432" (UID: "b1dc1d2a-552d-4400-9d1b-12a3a051c432"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.929366 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-config-data" (OuterVolumeSpecName: "config-data") pod "b1dc1d2a-552d-4400-9d1b-12a3a051c432" (UID: "b1dc1d2a-552d-4400-9d1b-12a3a051c432"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.964460 4783 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-config-data-custom\") on node \"crc\" DevicePath \"\""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.964528 4783 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-etc-swift\") on node \"crc\" DevicePath \"\""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.964571 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" "
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.964590 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6kdw\" (UniqueName: \"kubernetes.io/projected/b1dc1d2a-552d-4400-9d1b-12a3a051c432-kube-api-access-k6kdw\") on node \"crc\" DevicePath \"\""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.964608 4783 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-cache\") on node \"crc\" DevicePath \"\""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.964623 4783 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-lock\") on node \"crc\" DevicePath \"\""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.964642 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.964661 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq6fk\" (UniqueName: \"kubernetes.io/projected/3da50d95-fee8-4e78-ad46-c2d8ac95adc2-kube-api-access-rq6fk\") on node \"crc\" DevicePath \"\""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.964677 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.964692 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1dc1d2a-552d-4400-9d1b-12a3a051c432-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 13:59:00 crc kubenswrapper[4783]: I0930 13:59:00.984576 4783 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.066138 4783 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\""
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.128574 4783 generic.go:334] "Generic (PLEG): container finished" podID="b1dc1d2a-552d-4400-9d1b-12a3a051c432" containerID="024211a566129e37ebcc5af066c31416fc39da5253df84444b600a23f5f7a81e" exitCode=137
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.128633 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.128632 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b1dc1d2a-552d-4400-9d1b-12a3a051c432","Type":"ContainerDied","Data":"024211a566129e37ebcc5af066c31416fc39da5253df84444b600a23f5f7a81e"}
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.128803 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b1dc1d2a-552d-4400-9d1b-12a3a051c432","Type":"ContainerDied","Data":"0f1c9ebe4e5e0df5b0b08c378fe3daffed45fbc50314a6f51ae989e9816b5156"}
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.128824 4783 scope.go:117] "RemoveContainer" containerID="d100cc0e53e2504d5d93fa913ed337f4d3bdd4130801738388f1ae3625b57276"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.139854 4783 generic.go:334] "Generic (PLEG): container finished" podID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerID="6891d76e2dc7453b6d905b53c783801dbea028a59018a446002e2af529d42a44" exitCode=137
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.140020 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.140075 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-86g9g" podUID="7a99ba51-9d75-4181-b157-95c6ddb615f2" containerName="registry-server" containerID="cri-o://b5a72290ddc8c27ec0800c8f8746e612bfce0cc801c6b7730182bf6b633703ca" gracePeriod=2
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.140159 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"6891d76e2dc7453b6d905b53c783801dbea028a59018a446002e2af529d42a44"}
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.140244 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3da50d95-fee8-4e78-ad46-c2d8ac95adc2","Type":"ContainerDied","Data":"99da05eaacc7b0a5f19d53d07393eef42f90815ddbdfb308b4ac60fcc78a3217"}
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.171117 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.175176 4783 scope.go:117] "RemoveContainer" containerID="024211a566129e37ebcc5af066c31416fc39da5253df84444b600a23f5f7a81e"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.176514 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"]
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.193398 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"]
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.199507 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"]
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.202807 4783 scope.go:117] "RemoveContainer" containerID="d100cc0e53e2504d5d93fa913ed337f4d3bdd4130801738388f1ae3625b57276"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.203420 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d100cc0e53e2504d5d93fa913ed337f4d3bdd4130801738388f1ae3625b57276\": container with ID starting with d100cc0e53e2504d5d93fa913ed337f4d3bdd4130801738388f1ae3625b57276 not found: ID does not exist" containerID="d100cc0e53e2504d5d93fa913ed337f4d3bdd4130801738388f1ae3625b57276"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.203452 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d100cc0e53e2504d5d93fa913ed337f4d3bdd4130801738388f1ae3625b57276"} err="failed to get container status \"d100cc0e53e2504d5d93fa913ed337f4d3bdd4130801738388f1ae3625b57276\": rpc error: code = NotFound desc = could not find container \"d100cc0e53e2504d5d93fa913ed337f4d3bdd4130801738388f1ae3625b57276\": container with ID starting with d100cc0e53e2504d5d93fa913ed337f4d3bdd4130801738388f1ae3625b57276 not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.203472 4783 scope.go:117] "RemoveContainer" containerID="024211a566129e37ebcc5af066c31416fc39da5253df84444b600a23f5f7a81e"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.203928 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"024211a566129e37ebcc5af066c31416fc39da5253df84444b600a23f5f7a81e\": container with ID starting with 024211a566129e37ebcc5af066c31416fc39da5253df84444b600a23f5f7a81e not found: ID does not exist" containerID="024211a566129e37ebcc5af066c31416fc39da5253df84444b600a23f5f7a81e"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.203950 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"024211a566129e37ebcc5af066c31416fc39da5253df84444b600a23f5f7a81e"} err="failed to get container status \"024211a566129e37ebcc5af066c31416fc39da5253df84444b600a23f5f7a81e\": rpc error: code = NotFound desc = could not find container \"024211a566129e37ebcc5af066c31416fc39da5253df84444b600a23f5f7a81e\": container with ID starting with 024211a566129e37ebcc5af066c31416fc39da5253df84444b600a23f5f7a81e not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.203961 4783 scope.go:117] "RemoveContainer" containerID="6891d76e2dc7453b6d905b53c783801dbea028a59018a446002e2af529d42a44"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.223279 4783 scope.go:117] "RemoveContainer" containerID="0d70c68a7c4fc63e37a1cd88f352dcc6ea4b65b3ee61fb6b9e535bf1688edd50"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.241608 4783 scope.go:117] "RemoveContainer" containerID="a5d2dcabd6bb3cd5f6248c47300f6f1b1f5ab3e6ca65c46a1385315a9950b46a"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.387957 4783 scope.go:117] "RemoveContainer" containerID="e63439a8f0b25c832bdb5e04264df59bbf40a59ee781f02104bc7c90f0387d0f"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.408552 4783 scope.go:117] "RemoveContainer" containerID="65784350e77591c1ca799cd313cc75e676df485f81c5767c0ec61775c2feddef"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.469165 4783 scope.go:117] "RemoveContainer" containerID="f059bd6d5a4af4ef530539f9bc6ad12759d52d860d66ae9359e0c13a0faf1590"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.496580 4783 scope.go:117] "RemoveContainer" containerID="9d2fb4518ac235b269595c179e2eddcd2176f75944af31f8741a4f3a3772afd8"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.514804 4783 scope.go:117] "RemoveContainer" containerID="252f0904e64a3d0faf4018536bd7548f2c58c560fdd89b8a833d73b47bb1648c"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.617141 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-86g9g"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.671003 4783 scope.go:117] "RemoveContainer" containerID="84c1b4b25db238e614e22d807ca489645bb6aae387ddda2ee411cb9193dadcc3"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.671584 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a99ba51-9d75-4181-b157-95c6ddb615f2-catalog-content\") pod \"7a99ba51-9d75-4181-b157-95c6ddb615f2\" (UID: \"7a99ba51-9d75-4181-b157-95c6ddb615f2\") "
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.671650 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pm6qc\" (UniqueName: \"kubernetes.io/projected/7a99ba51-9d75-4181-b157-95c6ddb615f2-kube-api-access-pm6qc\") pod \"7a99ba51-9d75-4181-b157-95c6ddb615f2\" (UID: \"7a99ba51-9d75-4181-b157-95c6ddb615f2\") "
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.671739 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a99ba51-9d75-4181-b157-95c6ddb615f2-utilities\") pod \"7a99ba51-9d75-4181-b157-95c6ddb615f2\" (UID: \"7a99ba51-9d75-4181-b157-95c6ddb615f2\") "
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.672935 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a99ba51-9d75-4181-b157-95c6ddb615f2-utilities" (OuterVolumeSpecName: "utilities") pod "7a99ba51-9d75-4181-b157-95c6ddb615f2" (UID: "7a99ba51-9d75-4181-b157-95c6ddb615f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.678399 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a99ba51-9d75-4181-b157-95c6ddb615f2-kube-api-access-pm6qc" (OuterVolumeSpecName: "kube-api-access-pm6qc") pod "7a99ba51-9d75-4181-b157-95c6ddb615f2" (UID: "7a99ba51-9d75-4181-b157-95c6ddb615f2"). InnerVolumeSpecName "kube-api-access-pm6qc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.696745 4783 scope.go:117] "RemoveContainer" containerID="b68136dd9fc59706f3378836d528857df1eec12a03161416d9087d37a2d7d285"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.726136 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a99ba51-9d75-4181-b157-95c6ddb615f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7a99ba51-9d75-4181-b157-95c6ddb615f2" (UID: "7a99ba51-9d75-4181-b157-95c6ddb615f2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.732192 4783 scope.go:117] "RemoveContainer" containerID="5e62b2afddb1cd79f42408968b4363c8781a372ab3e53833b770416cab3087b7"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.756033 4783 scope.go:117] "RemoveContainer" containerID="bd7cfdf6e1722a5178a727be64336e94d492136884d482d78c9458aafb01c3e3"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.771414 4783 scope.go:117] "RemoveContainer" containerID="8aec62a44e6d0e2bf5d9c89e16252de35b71ae052ea1691ca721df50cb2cd898"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.772675 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a99ba51-9d75-4181-b157-95c6ddb615f2-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.772708 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a99ba51-9d75-4181-b157-95c6ddb615f2-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.772729 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pm6qc\" (UniqueName: \"kubernetes.io/projected/7a99ba51-9d75-4181-b157-95c6ddb615f2-kube-api-access-pm6qc\") on node \"crc\" DevicePath \"\""
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.785684 4783 scope.go:117] "RemoveContainer" containerID="e78afbdd94a9616ec2021e98bb1be4fbc47a48f38b3c103a4fcefb64434fd5b1"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.803028 4783 scope.go:117] "RemoveContainer" containerID="19f8581fd57609c6eca4ec015f369dde264c61b10ce59f14103fa1cc03844e73"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.826074 4783 scope.go:117] "RemoveContainer" containerID="6891d76e2dc7453b6d905b53c783801dbea028a59018a446002e2af529d42a44"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.826541 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6891d76e2dc7453b6d905b53c783801dbea028a59018a446002e2af529d42a44\": container with ID starting with 6891d76e2dc7453b6d905b53c783801dbea028a59018a446002e2af529d42a44 not found: ID does not exist" containerID="6891d76e2dc7453b6d905b53c783801dbea028a59018a446002e2af529d42a44"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.826578 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6891d76e2dc7453b6d905b53c783801dbea028a59018a446002e2af529d42a44"} err="failed to get container status \"6891d76e2dc7453b6d905b53c783801dbea028a59018a446002e2af529d42a44\": rpc error: code = NotFound desc = could not find container \"6891d76e2dc7453b6d905b53c783801dbea028a59018a446002e2af529d42a44\": container with ID starting with 6891d76e2dc7453b6d905b53c783801dbea028a59018a446002e2af529d42a44 not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.826601 4783 scope.go:117] "RemoveContainer" containerID="0d70c68a7c4fc63e37a1cd88f352dcc6ea4b65b3ee61fb6b9e535bf1688edd50"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.826949 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d70c68a7c4fc63e37a1cd88f352dcc6ea4b65b3ee61fb6b9e535bf1688edd50\": container with ID starting with 0d70c68a7c4fc63e37a1cd88f352dcc6ea4b65b3ee61fb6b9e535bf1688edd50 not found: ID does not exist" containerID="0d70c68a7c4fc63e37a1cd88f352dcc6ea4b65b3ee61fb6b9e535bf1688edd50"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.826975 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d70c68a7c4fc63e37a1cd88f352dcc6ea4b65b3ee61fb6b9e535bf1688edd50"} err="failed to get container status \"0d70c68a7c4fc63e37a1cd88f352dcc6ea4b65b3ee61fb6b9e535bf1688edd50\": rpc error: code = NotFound desc = could not find container \"0d70c68a7c4fc63e37a1cd88f352dcc6ea4b65b3ee61fb6b9e535bf1688edd50\": container with ID starting with 0d70c68a7c4fc63e37a1cd88f352dcc6ea4b65b3ee61fb6b9e535bf1688edd50 not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.826990 4783 scope.go:117] "RemoveContainer" containerID="a5d2dcabd6bb3cd5f6248c47300f6f1b1f5ab3e6ca65c46a1385315a9950b46a"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.827329 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5d2dcabd6bb3cd5f6248c47300f6f1b1f5ab3e6ca65c46a1385315a9950b46a\": container with ID starting with a5d2dcabd6bb3cd5f6248c47300f6f1b1f5ab3e6ca65c46a1385315a9950b46a not found: ID does not exist" containerID="a5d2dcabd6bb3cd5f6248c47300f6f1b1f5ab3e6ca65c46a1385315a9950b46a"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.827348 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5d2dcabd6bb3cd5f6248c47300f6f1b1f5ab3e6ca65c46a1385315a9950b46a"} err="failed to get container status \"a5d2dcabd6bb3cd5f6248c47300f6f1b1f5ab3e6ca65c46a1385315a9950b46a\": rpc error: code = NotFound desc = could not find container \"a5d2dcabd6bb3cd5f6248c47300f6f1b1f5ab3e6ca65c46a1385315a9950b46a\": container with ID starting with a5d2dcabd6bb3cd5f6248c47300f6f1b1f5ab3e6ca65c46a1385315a9950b46a not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.827360 4783 scope.go:117] "RemoveContainer" containerID="e63439a8f0b25c832bdb5e04264df59bbf40a59ee781f02104bc7c90f0387d0f"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.827621 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e63439a8f0b25c832bdb5e04264df59bbf40a59ee781f02104bc7c90f0387d0f\": container with ID starting with e63439a8f0b25c832bdb5e04264df59bbf40a59ee781f02104bc7c90f0387d0f not found: ID does not exist" containerID="e63439a8f0b25c832bdb5e04264df59bbf40a59ee781f02104bc7c90f0387d0f"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.827647 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e63439a8f0b25c832bdb5e04264df59bbf40a59ee781f02104bc7c90f0387d0f"} err="failed to get container status \"e63439a8f0b25c832bdb5e04264df59bbf40a59ee781f02104bc7c90f0387d0f\": rpc error: code = NotFound desc = could not find container \"e63439a8f0b25c832bdb5e04264df59bbf40a59ee781f02104bc7c90f0387d0f\": container with ID starting with e63439a8f0b25c832bdb5e04264df59bbf40a59ee781f02104bc7c90f0387d0f not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.827663 4783 scope.go:117] "RemoveContainer" containerID="65784350e77591c1ca799cd313cc75e676df485f81c5767c0ec61775c2feddef"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.827890 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65784350e77591c1ca799cd313cc75e676df485f81c5767c0ec61775c2feddef\": container with ID starting with 65784350e77591c1ca799cd313cc75e676df485f81c5767c0ec61775c2feddef not found: ID does not exist" containerID="65784350e77591c1ca799cd313cc75e676df485f81c5767c0ec61775c2feddef"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.827918 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65784350e77591c1ca799cd313cc75e676df485f81c5767c0ec61775c2feddef"} err="failed to get container status \"65784350e77591c1ca799cd313cc75e676df485f81c5767c0ec61775c2feddef\": rpc error: code = NotFound desc = could not find container \"65784350e77591c1ca799cd313cc75e676df485f81c5767c0ec61775c2feddef\": container with ID starting with 65784350e77591c1ca799cd313cc75e676df485f81c5767c0ec61775c2feddef not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.827936 4783 scope.go:117] "RemoveContainer" containerID="f059bd6d5a4af4ef530539f9bc6ad12759d52d860d66ae9359e0c13a0faf1590"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.828177 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f059bd6d5a4af4ef530539f9bc6ad12759d52d860d66ae9359e0c13a0faf1590\": container with ID starting with f059bd6d5a4af4ef530539f9bc6ad12759d52d860d66ae9359e0c13a0faf1590 not found: ID does not exist" containerID="f059bd6d5a4af4ef530539f9bc6ad12759d52d860d66ae9359e0c13a0faf1590"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.828206 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f059bd6d5a4af4ef530539f9bc6ad12759d52d860d66ae9359e0c13a0faf1590"} err="failed to get container status \"f059bd6d5a4af4ef530539f9bc6ad12759d52d860d66ae9359e0c13a0faf1590\": rpc error: code = NotFound desc = could not find container \"f059bd6d5a4af4ef530539f9bc6ad12759d52d860d66ae9359e0c13a0faf1590\": container with ID starting with f059bd6d5a4af4ef530539f9bc6ad12759d52d860d66ae9359e0c13a0faf1590 not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.828237 4783 scope.go:117] "RemoveContainer" containerID="9d2fb4518ac235b269595c179e2eddcd2176f75944af31f8741a4f3a3772afd8"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.828438 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d2fb4518ac235b269595c179e2eddcd2176f75944af31f8741a4f3a3772afd8\": container with ID starting with 9d2fb4518ac235b269595c179e2eddcd2176f75944af31f8741a4f3a3772afd8 not found: ID does not exist" containerID="9d2fb4518ac235b269595c179e2eddcd2176f75944af31f8741a4f3a3772afd8"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.828463 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d2fb4518ac235b269595c179e2eddcd2176f75944af31f8741a4f3a3772afd8"} err="failed to get container status \"9d2fb4518ac235b269595c179e2eddcd2176f75944af31f8741a4f3a3772afd8\": rpc error: code = NotFound desc = could not find container \"9d2fb4518ac235b269595c179e2eddcd2176f75944af31f8741a4f3a3772afd8\": container with ID starting with 9d2fb4518ac235b269595c179e2eddcd2176f75944af31f8741a4f3a3772afd8 not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.828481 4783 scope.go:117] "RemoveContainer" containerID="252f0904e64a3d0faf4018536bd7548f2c58c560fdd89b8a833d73b47bb1648c"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.828745 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"252f0904e64a3d0faf4018536bd7548f2c58c560fdd89b8a833d73b47bb1648c\": container with ID starting with 252f0904e64a3d0faf4018536bd7548f2c58c560fdd89b8a833d73b47bb1648c not found: ID does not exist" containerID="252f0904e64a3d0faf4018536bd7548f2c58c560fdd89b8a833d73b47bb1648c"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.828791 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"252f0904e64a3d0faf4018536bd7548f2c58c560fdd89b8a833d73b47bb1648c"} err="failed to get container status \"252f0904e64a3d0faf4018536bd7548f2c58c560fdd89b8a833d73b47bb1648c\": rpc error: code = NotFound desc = could not find container \"252f0904e64a3d0faf4018536bd7548f2c58c560fdd89b8a833d73b47bb1648c\": container with ID starting with 252f0904e64a3d0faf4018536bd7548f2c58c560fdd89b8a833d73b47bb1648c not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.828821 4783 scope.go:117] "RemoveContainer" containerID="84c1b4b25db238e614e22d807ca489645bb6aae387ddda2ee411cb9193dadcc3"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.829059 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84c1b4b25db238e614e22d807ca489645bb6aae387ddda2ee411cb9193dadcc3\": container with ID starting with 84c1b4b25db238e614e22d807ca489645bb6aae387ddda2ee411cb9193dadcc3 not found: ID does not exist" containerID="84c1b4b25db238e614e22d807ca489645bb6aae387ddda2ee411cb9193dadcc3"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.829089 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84c1b4b25db238e614e22d807ca489645bb6aae387ddda2ee411cb9193dadcc3"} err="failed to get container status \"84c1b4b25db238e614e22d807ca489645bb6aae387ddda2ee411cb9193dadcc3\": rpc error: code = NotFound desc = could not find container \"84c1b4b25db238e614e22d807ca489645bb6aae387ddda2ee411cb9193dadcc3\": container with ID starting with 84c1b4b25db238e614e22d807ca489645bb6aae387ddda2ee411cb9193dadcc3 not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.829107 4783 scope.go:117] "RemoveContainer" containerID="b68136dd9fc59706f3378836d528857df1eec12a03161416d9087d37a2d7d285"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.829371 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b68136dd9fc59706f3378836d528857df1eec12a03161416d9087d37a2d7d285\": container with ID starting with b68136dd9fc59706f3378836d528857df1eec12a03161416d9087d37a2d7d285 not found: ID does not exist" containerID="b68136dd9fc59706f3378836d528857df1eec12a03161416d9087d37a2d7d285"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.829402 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b68136dd9fc59706f3378836d528857df1eec12a03161416d9087d37a2d7d285"} err="failed to get container status \"b68136dd9fc59706f3378836d528857df1eec12a03161416d9087d37a2d7d285\": rpc error: code = NotFound desc = could not find container \"b68136dd9fc59706f3378836d528857df1eec12a03161416d9087d37a2d7d285\": container with ID starting with b68136dd9fc59706f3378836d528857df1eec12a03161416d9087d37a2d7d285 not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.829421 4783 scope.go:117] "RemoveContainer" containerID="5e62b2afddb1cd79f42408968b4363c8781a372ab3e53833b770416cab3087b7"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.829663 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e62b2afddb1cd79f42408968b4363c8781a372ab3e53833b770416cab3087b7\": container with ID starting with 5e62b2afddb1cd79f42408968b4363c8781a372ab3e53833b770416cab3087b7 not found: ID does not exist" containerID="5e62b2afddb1cd79f42408968b4363c8781a372ab3e53833b770416cab3087b7"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.829691 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e62b2afddb1cd79f42408968b4363c8781a372ab3e53833b770416cab3087b7"} err="failed to get container status \"5e62b2afddb1cd79f42408968b4363c8781a372ab3e53833b770416cab3087b7\": rpc error: code = NotFound desc = could not find container \"5e62b2afddb1cd79f42408968b4363c8781a372ab3e53833b770416cab3087b7\": container with ID starting with 5e62b2afddb1cd79f42408968b4363c8781a372ab3e53833b770416cab3087b7 not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.829709 4783 scope.go:117] "RemoveContainer" containerID="bd7cfdf6e1722a5178a727be64336e94d492136884d482d78c9458aafb01c3e3"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.829883 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd7cfdf6e1722a5178a727be64336e94d492136884d482d78c9458aafb01c3e3\": container with ID starting with bd7cfdf6e1722a5178a727be64336e94d492136884d482d78c9458aafb01c3e3 not found: ID does not exist" containerID="bd7cfdf6e1722a5178a727be64336e94d492136884d482d78c9458aafb01c3e3"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.829907 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd7cfdf6e1722a5178a727be64336e94d492136884d482d78c9458aafb01c3e3"} err="failed to get container status \"bd7cfdf6e1722a5178a727be64336e94d492136884d482d78c9458aafb01c3e3\": rpc error: code = NotFound desc = could not find container \"bd7cfdf6e1722a5178a727be64336e94d492136884d482d78c9458aafb01c3e3\": container with ID starting with bd7cfdf6e1722a5178a727be64336e94d492136884d482d78c9458aafb01c3e3 not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.829925 4783 scope.go:117] "RemoveContainer" containerID="8aec62a44e6d0e2bf5d9c89e16252de35b71ae052ea1691ca721df50cb2cd898"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.830093 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8aec62a44e6d0e2bf5d9c89e16252de35b71ae052ea1691ca721df50cb2cd898\": container with ID starting with 8aec62a44e6d0e2bf5d9c89e16252de35b71ae052ea1691ca721df50cb2cd898 not found: ID does not exist" containerID="8aec62a44e6d0e2bf5d9c89e16252de35b71ae052ea1691ca721df50cb2cd898"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.830120 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8aec62a44e6d0e2bf5d9c89e16252de35b71ae052ea1691ca721df50cb2cd898"} err="failed to get container status \"8aec62a44e6d0e2bf5d9c89e16252de35b71ae052ea1691ca721df50cb2cd898\": rpc error: code = NotFound desc = could not find container \"8aec62a44e6d0e2bf5d9c89e16252de35b71ae052ea1691ca721df50cb2cd898\": container with ID starting with 8aec62a44e6d0e2bf5d9c89e16252de35b71ae052ea1691ca721df50cb2cd898 not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.830168 4783 scope.go:117] "RemoveContainer" containerID="e78afbdd94a9616ec2021e98bb1be4fbc47a48f38b3c103a4fcefb64434fd5b1"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.830391 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e78afbdd94a9616ec2021e98bb1be4fbc47a48f38b3c103a4fcefb64434fd5b1\": container with ID starting with e78afbdd94a9616ec2021e98bb1be4fbc47a48f38b3c103a4fcefb64434fd5b1 not found: ID does not exist" containerID="e78afbdd94a9616ec2021e98bb1be4fbc47a48f38b3c103a4fcefb64434fd5b1"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.830419 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e78afbdd94a9616ec2021e98bb1be4fbc47a48f38b3c103a4fcefb64434fd5b1"} err="failed to get container status \"e78afbdd94a9616ec2021e98bb1be4fbc47a48f38b3c103a4fcefb64434fd5b1\": rpc error: code = NotFound desc = could not find container \"e78afbdd94a9616ec2021e98bb1be4fbc47a48f38b3c103a4fcefb64434fd5b1\": container with ID starting with e78afbdd94a9616ec2021e98bb1be4fbc47a48f38b3c103a4fcefb64434fd5b1 not found: ID does not exist"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.830435 4783 scope.go:117] "RemoveContainer" containerID="19f8581fd57609c6eca4ec015f369dde264c61b10ce59f14103fa1cc03844e73"
Sep 30 13:59:01 crc kubenswrapper[4783]: E0930 13:59:01.830628 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19f8581fd57609c6eca4ec015f369dde264c61b10ce59f14103fa1cc03844e73\": container with ID starting with 19f8581fd57609c6eca4ec015f369dde264c61b10ce59f14103fa1cc03844e73 not found: ID does not exist" containerID="19f8581fd57609c6eca4ec015f369dde264c61b10ce59f14103fa1cc03844e73"
Sep 30 13:59:01 crc kubenswrapper[4783]: I0930 13:59:01.830654 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19f8581fd57609c6eca4ec015f369dde264c61b10ce59f14103fa1cc03844e73"} err="failed to get container status \"19f8581fd57609c6eca4ec015f369dde264c61b10ce59f14103fa1cc03844e73\": rpc error: code = NotFound desc = could not find container \"19f8581fd57609c6eca4ec015f369dde264c61b10ce59f14103fa1cc03844e73\": container with ID starting with 19f8581fd57609c6eca4ec015f369dde264c61b10ce59f14103fa1cc03844e73 not found: ID does not exist"
Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.157519 4783 generic.go:334] "Generic (PLEG): container finished" podID="7a99ba51-9d75-4181-b157-95c6ddb615f2" containerID="b5a72290ddc8c27ec0800c8f8746e612bfce0cc801c6b7730182bf6b633703ca" exitCode=0
Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.157614 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-86g9g" event={"ID":"7a99ba51-9d75-4181-b157-95c6ddb615f2","Type":"ContainerDied","Data":"b5a72290ddc8c27ec0800c8f8746e612bfce0cc801c6b7730182bf6b633703ca"}
Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.157694 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-86g9g" event={"ID":"7a99ba51-9d75-4181-b157-95c6ddb615f2","Type":"ContainerDied","Data":"c98d0cc0500d772156d4b6df51ad3a0d05ce7bf89a93e925597878178c074fd7"}
Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.157695 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-86g9g"
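The "Generic (PLEG): container finished" / "SyncLoop (PLEG): event for pod" pairs above come from the pod lifecycle event generator's relist: it diffs the previous and current runtime view of each pod's containers and emits ContainerDied events for the sync loop to consume. A rough sketch of that diffing under simplified, hypothetical types:

package main

import "fmt"

// state is a simplified container state (the real PLEG tracks richer
// runtime status).
type state int

const (
	running state = iota + 1
	exited
)

type event struct{ pod, id, kind string }

// relist compares the previous and current container states for a pod
// and emits ContainerDied for anything that was running and no longer
// is, loosely following the relist pattern behind the PLEG entries above.
func relist(pod string, old, cur map[string]state) []event {
	var evs []event
	for id, was := range old {
		now, ok := cur[id]
		if was == running && (!ok || now == exited) {
			evs = append(evs, event{pod, id, "ContainerDied"})
		}
	}
	return evs
}

func main() {
	old := map[string]state{"b5a72290ddc8": running}
	cur := map[string]state{"b5a72290ddc8": exited}
	for _, e := range relist("openshift-marketplace/certified-operators-86g9g", old, cur) {
		fmt.Printf("SyncLoop (PLEG): event for pod %q: %s %s\n", e.pod, e.kind, e.id)
	}
}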
4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-86g9g" Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.157742 4783 scope.go:117] "RemoveContainer" containerID="b5a72290ddc8c27ec0800c8f8746e612bfce0cc801c6b7730182bf6b633703ca" Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.193924 4783 scope.go:117] "RemoveContainer" containerID="3b29ce60f48e2f01e452b162430ec742618b59c593f361118cb4043e15e233a7" Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.201861 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-86g9g"] Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.207829 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-86g9g"] Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.233572 4783 scope.go:117] "RemoveContainer" containerID="b8e417a2dd312c12a6209ffc87a73b7f8459f3feca5db79a9eb0df543b32571a" Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.260179 4783 scope.go:117] "RemoveContainer" containerID="b5a72290ddc8c27ec0800c8f8746e612bfce0cc801c6b7730182bf6b633703ca" Sep 30 13:59:02 crc kubenswrapper[4783]: E0930 13:59:02.260664 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5a72290ddc8c27ec0800c8f8746e612bfce0cc801c6b7730182bf6b633703ca\": container with ID starting with b5a72290ddc8c27ec0800c8f8746e612bfce0cc801c6b7730182bf6b633703ca not found: ID does not exist" containerID="b5a72290ddc8c27ec0800c8f8746e612bfce0cc801c6b7730182bf6b633703ca" Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.260760 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5a72290ddc8c27ec0800c8f8746e612bfce0cc801c6b7730182bf6b633703ca"} err="failed to get container status \"b5a72290ddc8c27ec0800c8f8746e612bfce0cc801c6b7730182bf6b633703ca\": rpc error: code = NotFound desc = could not find container \"b5a72290ddc8c27ec0800c8f8746e612bfce0cc801c6b7730182bf6b633703ca\": container with ID starting with b5a72290ddc8c27ec0800c8f8746e612bfce0cc801c6b7730182bf6b633703ca not found: ID does not exist" Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.260801 4783 scope.go:117] "RemoveContainer" containerID="3b29ce60f48e2f01e452b162430ec742618b59c593f361118cb4043e15e233a7" Sep 30 13:59:02 crc kubenswrapper[4783]: E0930 13:59:02.261154 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b29ce60f48e2f01e452b162430ec742618b59c593f361118cb4043e15e233a7\": container with ID starting with 3b29ce60f48e2f01e452b162430ec742618b59c593f361118cb4043e15e233a7 not found: ID does not exist" containerID="3b29ce60f48e2f01e452b162430ec742618b59c593f361118cb4043e15e233a7" Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.261215 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b29ce60f48e2f01e452b162430ec742618b59c593f361118cb4043e15e233a7"} err="failed to get container status \"3b29ce60f48e2f01e452b162430ec742618b59c593f361118cb4043e15e233a7\": rpc error: code = NotFound desc = could not find container \"3b29ce60f48e2f01e452b162430ec742618b59c593f361118cb4043e15e233a7\": container with ID starting with 3b29ce60f48e2f01e452b162430ec742618b59c593f361118cb4043e15e233a7 not found: ID does not exist" Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 
13:59:02.261277 4783 scope.go:117] "RemoveContainer" containerID="b8e417a2dd312c12a6209ffc87a73b7f8459f3feca5db79a9eb0df543b32571a" Sep 30 13:59:02 crc kubenswrapper[4783]: E0930 13:59:02.261701 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8e417a2dd312c12a6209ffc87a73b7f8459f3feca5db79a9eb0df543b32571a\": container with ID starting with b8e417a2dd312c12a6209ffc87a73b7f8459f3feca5db79a9eb0df543b32571a not found: ID does not exist" containerID="b8e417a2dd312c12a6209ffc87a73b7f8459f3feca5db79a9eb0df543b32571a" Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.261732 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8e417a2dd312c12a6209ffc87a73b7f8459f3feca5db79a9eb0df543b32571a"} err="failed to get container status \"b8e417a2dd312c12a6209ffc87a73b7f8459f3feca5db79a9eb0df543b32571a\": rpc error: code = NotFound desc = could not find container \"b8e417a2dd312c12a6209ffc87a73b7f8459f3feca5db79a9eb0df543b32571a\": container with ID starting with b8e417a2dd312c12a6209ffc87a73b7f8459f3feca5db79a9eb0df543b32571a not found: ID does not exist" Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.851607 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" path="/var/lib/kubelet/pods/3da50d95-fee8-4e78-ad46-c2d8ac95adc2/volumes" Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.853574 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a99ba51-9d75-4181-b157-95c6ddb615f2" path="/var/lib/kubelet/pods/7a99ba51-9d75-4181-b157-95c6ddb615f2/volumes" Sep 30 13:59:02 crc kubenswrapper[4783]: I0930 13:59:02.854128 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1dc1d2a-552d-4400-9d1b-12a3a051c432" path="/var/lib/kubelet/pods/b1dc1d2a-552d-4400-9d1b-12a3a051c432/volumes" Sep 30 13:59:14 crc kubenswrapper[4783]: I0930 13:59:14.697943 4783 scope.go:117] "RemoveContainer" containerID="c1abab52854f97e2baa52627f04d820409556ef44e46042b8cc88ae108210266" Sep 30 13:59:14 crc kubenswrapper[4783]: I0930 13:59:14.738148 4783 scope.go:117] "RemoveContainer" containerID="8b08ad557e8661b66aeaeaebf65ee5d1552ababcaf7404348631f3f5ac12cc25" Sep 30 13:59:14 crc kubenswrapper[4783]: I0930 13:59:14.771095 4783 scope.go:117] "RemoveContainer" containerID="21aff4dd95bde5a8670e89f3dd3efd6a09ad9db3e02d379da32cd13f67cabcae" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.168615 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s"] Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169558 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1dc1d2a-552d-4400-9d1b-12a3a051c432" containerName="cinder-scheduler" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169575 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1dc1d2a-552d-4400-9d1b-12a3a051c432" containerName="cinder-scheduler" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169584 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0280c83-c3f5-45d6-abb4-df04dbeed8e3" containerName="nova-scheduler-scheduler" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169593 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0280c83-c3f5-45d6-abb4-df04dbeed8e3" containerName="nova-scheduler-scheduler" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169607 4783 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="rsync" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169615 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="rsync" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169629 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-auditor" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169637 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-auditor" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169656 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-updater" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169663 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-updater" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169681 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-auditor" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169689 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-auditor" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169701 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a09ae34-f770-404f-b7ec-1fd3b630bf4c" containerName="mysql-bootstrap" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169710 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a09ae34-f770-404f-b7ec-1fd3b630bf4c" containerName="mysql-bootstrap" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169722 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-server" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169731 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-server" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169745 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-auditor" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169753 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-auditor" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169762 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a99ba51-9d75-4181-b157-95c6ddb615f2" containerName="extract-content" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169769 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a99ba51-9d75-4181-b157-95c6ddb615f2" containerName="extract-content" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169782 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a99ba51-9d75-4181-b157-95c6ddb615f2" containerName="registry-server" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169791 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a99ba51-9d75-4181-b157-95c6ddb615f2" containerName="registry-server" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169801 4783 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovsdb-server-init" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169808 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovsdb-server-init" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169820 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-replicator" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169827 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-replicator" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169841 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-server" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169847 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-server" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169859 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1dc1d2a-552d-4400-9d1b-12a3a051c432" containerName="probe" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169867 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1dc1d2a-552d-4400-9d1b-12a3a051c432" containerName="probe" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169885 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-expirer" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169893 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-expirer" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169903 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-reaper" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169910 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-reaper" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169920 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-updater" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169928 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-updater" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169937 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a09ae34-f770-404f-b7ec-1fd3b630bf4c" containerName="galera" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169944 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a09ae34-f770-404f-b7ec-1fd3b630bf4c" containerName="galera" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169955 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovsdb-server" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169962 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovsdb-server" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169971 4783 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-replicator" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169978 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-replicator" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.169987 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="swift-recon-cron" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.169995 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="swift-recon-cron" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.170006 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-replicator" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170013 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-replicator" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.170025 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a99ba51-9d75-4181-b157-95c6ddb615f2" containerName="extract-utilities" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170033 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a99ba51-9d75-4181-b157-95c6ddb615f2" containerName="extract-utilities" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.170044 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovs-vswitchd" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170051 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovs-vswitchd" Sep 30 14:00:00 crc kubenswrapper[4783]: E0930 14:00:00.170068 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-server" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170123 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-server" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170302 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-auditor" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170318 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-server" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170332 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-reaper" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170345 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0280c83-c3f5-45d6-abb4-df04dbeed8e3" containerName="nova-scheduler-scheduler" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170357 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-expirer" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170373 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-server" Sep 30 
14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170386 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a09ae34-f770-404f-b7ec-1fd3b630bf4c" containerName="galera" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170396 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-auditor" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170411 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-replicator" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170421 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a99ba51-9d75-4181-b157-95c6ddb615f2" containerName="registry-server" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170432 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="rsync" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170443 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-replicator" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170453 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1dc1d2a-552d-4400-9d1b-12a3a051c432" containerName="cinder-scheduler" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170463 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-replicator" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170472 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-server" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170482 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="object-updater" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170491 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1dc1d2a-552d-4400-9d1b-12a3a051c432" containerName="probe" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170500 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="account-auditor" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170514 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovs-vswitchd" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170527 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="61f71f56-b66e-46a2-a0c5-25d0477db0a2" containerName="ovsdb-server" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170540 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="container-updater" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.170548 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da50d95-fee8-4e78-ad46-c2d8ac95adc2" containerName="swift-recon-cron" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.171160 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.177050 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.177492 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.177667 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s"] Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.247404 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68e75c21-2820-4856-8e09-f353a6f0661a-secret-volume\") pod \"collect-profiles-29320680-w457s\" (UID: \"68e75c21-2820-4856-8e09-f353a6f0661a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.247653 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68e75c21-2820-4856-8e09-f353a6f0661a-config-volume\") pod \"collect-profiles-29320680-w457s\" (UID: \"68e75c21-2820-4856-8e09-f353a6f0661a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.247794 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwxnh\" (UniqueName: \"kubernetes.io/projected/68e75c21-2820-4856-8e09-f353a6f0661a-kube-api-access-jwxnh\") pod \"collect-profiles-29320680-w457s\" (UID: \"68e75c21-2820-4856-8e09-f353a6f0661a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.348855 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68e75c21-2820-4856-8e09-f353a6f0661a-secret-volume\") pod \"collect-profiles-29320680-w457s\" (UID: \"68e75c21-2820-4856-8e09-f353a6f0661a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.349223 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68e75c21-2820-4856-8e09-f353a6f0661a-config-volume\") pod \"collect-profiles-29320680-w457s\" (UID: \"68e75c21-2820-4856-8e09-f353a6f0661a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.349688 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwxnh\" (UniqueName: \"kubernetes.io/projected/68e75c21-2820-4856-8e09-f353a6f0661a-kube-api-access-jwxnh\") pod \"collect-profiles-29320680-w457s\" (UID: \"68e75c21-2820-4856-8e09-f353a6f0661a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.350099 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68e75c21-2820-4856-8e09-f353a6f0661a-config-volume\") pod 
\"collect-profiles-29320680-w457s\" (UID: \"68e75c21-2820-4856-8e09-f353a6f0661a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.358990 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68e75c21-2820-4856-8e09-f353a6f0661a-secret-volume\") pod \"collect-profiles-29320680-w457s\" (UID: \"68e75c21-2820-4856-8e09-f353a6f0661a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.365960 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwxnh\" (UniqueName: \"kubernetes.io/projected/68e75c21-2820-4856-8e09-f353a6f0661a-kube-api-access-jwxnh\") pod \"collect-profiles-29320680-w457s\" (UID: \"68e75c21-2820-4856-8e09-f353a6f0661a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.496786 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" Sep 30 14:00:00 crc kubenswrapper[4783]: I0930 14:00:00.909639 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s"] Sep 30 14:00:01 crc kubenswrapper[4783]: I0930 14:00:01.742134 4783 generic.go:334] "Generic (PLEG): container finished" podID="68e75c21-2820-4856-8e09-f353a6f0661a" containerID="a4f40b91e052d74d21f2dc438123afc52774fed053a4b215f1f46720fa8edbc1" exitCode=0 Sep 30 14:00:01 crc kubenswrapper[4783]: I0930 14:00:01.742189 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" event={"ID":"68e75c21-2820-4856-8e09-f353a6f0661a","Type":"ContainerDied","Data":"a4f40b91e052d74d21f2dc438123afc52774fed053a4b215f1f46720fa8edbc1"} Sep 30 14:00:01 crc kubenswrapper[4783]: I0930 14:00:01.742484 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" event={"ID":"68e75c21-2820-4856-8e09-f353a6f0661a","Type":"ContainerStarted","Data":"f93f3a97a192623d0ecbee856e32518b104d8d0f15291f67fd9a076540deea28"} Sep 30 14:00:03 crc kubenswrapper[4783]: I0930 14:00:03.000361 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" Sep 30 14:00:03 crc kubenswrapper[4783]: I0930 14:00:03.187828 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68e75c21-2820-4856-8e09-f353a6f0661a-config-volume\") pod \"68e75c21-2820-4856-8e09-f353a6f0661a\" (UID: \"68e75c21-2820-4856-8e09-f353a6f0661a\") " Sep 30 14:00:03 crc kubenswrapper[4783]: I0930 14:00:03.187954 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwxnh\" (UniqueName: \"kubernetes.io/projected/68e75c21-2820-4856-8e09-f353a6f0661a-kube-api-access-jwxnh\") pod \"68e75c21-2820-4856-8e09-f353a6f0661a\" (UID: \"68e75c21-2820-4856-8e09-f353a6f0661a\") " Sep 30 14:00:03 crc kubenswrapper[4783]: I0930 14:00:03.188026 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68e75c21-2820-4856-8e09-f353a6f0661a-secret-volume\") pod \"68e75c21-2820-4856-8e09-f353a6f0661a\" (UID: \"68e75c21-2820-4856-8e09-f353a6f0661a\") " Sep 30 14:00:03 crc kubenswrapper[4783]: I0930 14:00:03.188623 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68e75c21-2820-4856-8e09-f353a6f0661a-config-volume" (OuterVolumeSpecName: "config-volume") pod "68e75c21-2820-4856-8e09-f353a6f0661a" (UID: "68e75c21-2820-4856-8e09-f353a6f0661a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:00:03 crc kubenswrapper[4783]: I0930 14:00:03.192836 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68e75c21-2820-4856-8e09-f353a6f0661a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "68e75c21-2820-4856-8e09-f353a6f0661a" (UID: "68e75c21-2820-4856-8e09-f353a6f0661a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 14:00:03 crc kubenswrapper[4783]: I0930 14:00:03.194143 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68e75c21-2820-4856-8e09-f353a6f0661a-kube-api-access-jwxnh" (OuterVolumeSpecName: "kube-api-access-jwxnh") pod "68e75c21-2820-4856-8e09-f353a6f0661a" (UID: "68e75c21-2820-4856-8e09-f353a6f0661a"). InnerVolumeSpecName "kube-api-access-jwxnh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:00:03 crc kubenswrapper[4783]: I0930 14:00:03.289332 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwxnh\" (UniqueName: \"kubernetes.io/projected/68e75c21-2820-4856-8e09-f353a6f0661a-kube-api-access-jwxnh\") on node \"crc\" DevicePath \"\"" Sep 30 14:00:03 crc kubenswrapper[4783]: I0930 14:00:03.289374 4783 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68e75c21-2820-4856-8e09-f353a6f0661a-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 14:00:03 crc kubenswrapper[4783]: I0930 14:00:03.289386 4783 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68e75c21-2820-4856-8e09-f353a6f0661a-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 14:00:03 crc kubenswrapper[4783]: I0930 14:00:03.760264 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" event={"ID":"68e75c21-2820-4856-8e09-f353a6f0661a","Type":"ContainerDied","Data":"f93f3a97a192623d0ecbee856e32518b104d8d0f15291f67fd9a076540deea28"} Sep 30 14:00:03 crc kubenswrapper[4783]: I0930 14:00:03.760310 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f93f3a97a192623d0ecbee856e32518b104d8d0f15291f67fd9a076540deea28" Sep 30 14:00:03 crc kubenswrapper[4783]: I0930 14:00:03.760371 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.452792 4783 scope.go:117] "RemoveContainer" containerID="bfdcaaa26c0aafae7f67df9116fac849120c495a3024bcb87d2ec57cb5988177" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.486956 4783 scope.go:117] "RemoveContainer" containerID="26f2653ce8c6fa111aefaee7376c8de9973a390c28f5166fee1bda8b0fa25259" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.527173 4783 scope.go:117] "RemoveContainer" containerID="3ef861bef2f56add4869192e5592580e98d2db290f25ea904d3a2752fa43affc" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.544419 4783 scope.go:117] "RemoveContainer" containerID="ca7a540125edb5ea8f93e4392a57252e540d0f904cfd8a485cd8c030f537dd2b" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.591019 4783 scope.go:117] "RemoveContainer" containerID="dbcbbb2c21a7bbf5f6862e92534c203025b94a1782ab43292ea1052cc9b2702e" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.627950 4783 scope.go:117] "RemoveContainer" containerID="45d206da77fcb67e119f037fcfa5d0b583642d09013a408c9ef81aa0123902e0" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.660460 4783 scope.go:117] "RemoveContainer" containerID="74ad5198c33bca2dc2c18003a512d1e4a71bde8c329802d06a71c3b072d428d1" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.682057 4783 scope.go:117] "RemoveContainer" containerID="d32de830a691d8a9e36aaad1a0ce5660cd686be4c09ee1c140bec330697b3d16" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.704871 4783 scope.go:117] "RemoveContainer" containerID="a292c102416e551d8b6ab46833ffcf17c8dc0518b4802aea9fda0c0279f9dbfa" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.725507 4783 scope.go:117] "RemoveContainer" containerID="237eb38fbdc94904d60796a2236ae34e060003dfa7daad4c18b8375762d75436" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.762473 4783 scope.go:117] "RemoveContainer" 
containerID="1c45e758b3738d7a28ed877e2f4c5febbca063c9419d9656e6d78dcf68514e8c" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.795166 4783 scope.go:117] "RemoveContainer" containerID="890c5af0166010b924f1827b8ebfc5b1431692aedf4260f23c576544efc94058" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.816639 4783 scope.go:117] "RemoveContainer" containerID="31da0d4fbb6febd8bf5042f2feb8393e2237e495458a723709caf6e0acb16671" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.845994 4783 scope.go:117] "RemoveContainer" containerID="12c7b943e3e515968317bb4e2b36cb7ec137674e1291a8c6474038902d7af06c" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.866808 4783 scope.go:117] "RemoveContainer" containerID="850d2970bc8214d5e1d97a9ad7c93fc9c84be59211bd701c81e64b2ee2ab6b0e" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.893548 4783 scope.go:117] "RemoveContainer" containerID="bc89642497736475077a9ea1aa1c941d6da45da0f7b5e515ee7b27b1d5853429" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.916019 4783 scope.go:117] "RemoveContainer" containerID="de7d813759552084e588f18ac7d2fa4049833b8b17f8a47d3bf8b8918e2749e6" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.944011 4783 scope.go:117] "RemoveContainer" containerID="c50c006678d45fd361c7404fabc08415ff8162e67e9c073e3bba83e1036f18e6" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.968323 4783 scope.go:117] "RemoveContainer" containerID="4a9a7157e02c3b85c335f189160f0f21e4eeb272172ff1581b80ed8d12d5b4b1" Sep 30 14:00:15 crc kubenswrapper[4783]: I0930 14:00:15.987480 4783 scope.go:117] "RemoveContainer" containerID="d9380ce9afc2c1b59bb9f34b66876f19ee2fb153090277a0b45e42e71f7bdc8a" Sep 30 14:00:16 crc kubenswrapper[4783]: I0930 14:00:16.034243 4783 scope.go:117] "RemoveContainer" containerID="a3a24d4d11109348dc5aea74a514101dc4fd4971209e6891ef2e78c506c18182" Sep 30 14:00:16 crc kubenswrapper[4783]: I0930 14:00:16.057799 4783 scope.go:117] "RemoveContainer" containerID="d5c970b20a9e48b0f5f3c33d731065cf4ba92eb88450078d3f4745e63e8925f9" Sep 30 14:00:16 crc kubenswrapper[4783]: I0930 14:00:16.076550 4783 scope.go:117] "RemoveContainer" containerID="c7f3a38555bdabc4adc448d88761adfa98cc0de755f8e23c6c57230e33e5157b" Sep 30 14:01:07 crc kubenswrapper[4783]: I0930 14:01:07.673734 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:01:07 crc kubenswrapper[4783]: I0930 14:01:07.674180 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:01:16 crc kubenswrapper[4783]: I0930 14:01:16.450999 4783 scope.go:117] "RemoveContainer" containerID="ccc560ee8b7452abbe90f3abf8c9d54c86e65c7a9a1531ef75e232716e377ef8" Sep 30 14:01:16 crc kubenswrapper[4783]: I0930 14:01:16.473585 4783 scope.go:117] "RemoveContainer" containerID="1022d5e424762d7e29acf4475bea4e13229a028b0e4feddd4ba6cfac7675e760" Sep 30 14:01:16 crc kubenswrapper[4783]: I0930 14:01:16.504529 4783 scope.go:117] "RemoveContainer" containerID="79d24a63ac0119b9fc4aad736a0d1b85827096188af043727ed0edc34743a6c0" Sep 30 14:01:16 crc kubenswrapper[4783]: 
I0930 14:01:16.521023 4783 scope.go:117] "RemoveContainer" containerID="efe36767497a9fc9e028d17967e730c37c0c7d9e9dda36bf40da68bab84aeea1" Sep 30 14:01:37 crc kubenswrapper[4783]: I0930 14:01:37.674500 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:01:37 crc kubenswrapper[4783]: I0930 14:01:37.675271 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:02:07 crc kubenswrapper[4783]: I0930 14:02:07.673726 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:02:07 crc kubenswrapper[4783]: I0930 14:02:07.674328 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:02:07 crc kubenswrapper[4783]: I0930 14:02:07.674404 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 14:02:07 crc kubenswrapper[4783]: I0930 14:02:07.675119 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 14:02:07 crc kubenswrapper[4783]: I0930 14:02:07.675210 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" gracePeriod=600 Sep 30 14:02:07 crc kubenswrapper[4783]: E0930 14:02:07.825486 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:02:07 crc kubenswrapper[4783]: I0930 14:02:07.847598 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" exitCode=0 Sep 30 14:02:07 crc kubenswrapper[4783]: I0930 14:02:07.847654 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740"} Sep 30 14:02:07 crc kubenswrapper[4783]: I0930 14:02:07.847701 4783 scope.go:117] "RemoveContainer" containerID="edaa8451ea5ff38f645e9552be3529f3e61b692d69e710a73e7a302ef19b35cd" Sep 30 14:02:07 crc kubenswrapper[4783]: I0930 14:02:07.848309 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:02:07 crc kubenswrapper[4783]: E0930 14:02:07.848644 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:02:16 crc kubenswrapper[4783]: I0930 14:02:16.577707 4783 scope.go:117] "RemoveContainer" containerID="1c6d456af6501da0a87be7ace5190030c2228ba920c68f561c9e14c6789bd6e6" Sep 30 14:02:16 crc kubenswrapper[4783]: I0930 14:02:16.634843 4783 scope.go:117] "RemoveContainer" containerID="61ed2802b332bc985b87c1b28a78873d2729b51598aa9d034d16550c9bab9ad4" Sep 30 14:02:16 crc kubenswrapper[4783]: I0930 14:02:16.652672 4783 scope.go:117] "RemoveContainer" containerID="dd4f66a67347ded33053c0941830fe501acc0e03ce96f7d2ca1c96907ddd4983" Sep 30 14:02:16 crc kubenswrapper[4783]: I0930 14:02:16.677012 4783 scope.go:117] "RemoveContainer" containerID="31bfe3ba4d5ed2336157dbaeae23b2d50af31aaf1acd49a5e31db7e5406886dd" Sep 30 14:02:16 crc kubenswrapper[4783]: I0930 14:02:16.696656 4783 scope.go:117] "RemoveContainer" containerID="df1fc50ced5f6f68fdfaffc104690ef44d70fac76c5ae848f497044022ece357" Sep 30 14:02:16 crc kubenswrapper[4783]: I0930 14:02:16.714435 4783 scope.go:117] "RemoveContainer" containerID="59fb5843e88a8d0473293abcbd7df11b4d3a891f25b112ad89c3afdfa2f0b80f" Sep 30 14:02:16 crc kubenswrapper[4783]: I0930 14:02:16.733816 4783 scope.go:117] "RemoveContainer" containerID="70af416d1d605f74050168a2f3389c55e91e4acb27456ff5ec693839bb6060e7" Sep 30 14:02:16 crc kubenswrapper[4783]: I0930 14:02:16.760298 4783 scope.go:117] "RemoveContainer" containerID="5d04af509a9bb7bfbf8994b131a09676b7cd7a088b0bc9fad1edda3ef3454d1e" Sep 30 14:02:16 crc kubenswrapper[4783]: I0930 14:02:16.793164 4783 scope.go:117] "RemoveContainer" containerID="d1e1e216a0a4b5e90fb3243099d733cadcc1f3c31850aa187e3b905cce41f140" Sep 30 14:02:16 crc kubenswrapper[4783]: I0930 14:02:16.818630 4783 scope.go:117] "RemoveContainer" containerID="3ae59eb86d91fb5c806e663e95ec547ef87d696b32c8a261a6ab43509cdb555f" Sep 30 14:02:16 crc kubenswrapper[4783]: I0930 14:02:16.843042 4783 scope.go:117] "RemoveContainer" containerID="8a9e4d510cc8a81c1806a28e9179a238fbbf2ed19d8dc5cb74a39c6802f3a6c0" Sep 30 14:02:21 crc kubenswrapper[4783]: I0930 14:02:21.843750 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:02:21 crc kubenswrapper[4783]: E0930 14:02:21.844531 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:02:36 crc kubenswrapper[4783]: I0930 14:02:36.843218 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:02:36 crc kubenswrapper[4783]: E0930 14:02:36.844493 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:02:48 crc kubenswrapper[4783]: I0930 14:02:48.930462 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pxqjl"] Sep 30 14:02:48 crc kubenswrapper[4783]: E0930 14:02:48.931374 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68e75c21-2820-4856-8e09-f353a6f0661a" containerName="collect-profiles" Sep 30 14:02:48 crc kubenswrapper[4783]: I0930 14:02:48.931388 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="68e75c21-2820-4856-8e09-f353a6f0661a" containerName="collect-profiles" Sep 30 14:02:48 crc kubenswrapper[4783]: I0930 14:02:48.931572 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="68e75c21-2820-4856-8e09-f353a6f0661a" containerName="collect-profiles" Sep 30 14:02:48 crc kubenswrapper[4783]: I0930 14:02:48.934479 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:48 crc kubenswrapper[4783]: I0930 14:02:48.964411 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pxqjl"] Sep 30 14:02:48 crc kubenswrapper[4783]: I0930 14:02:48.989163 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-utilities\") pod \"community-operators-pxqjl\" (UID: \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\") " pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:48 crc kubenswrapper[4783]: I0930 14:02:48.989292 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn6qb\" (UniqueName: \"kubernetes.io/projected/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-kube-api-access-mn6qb\") pod \"community-operators-pxqjl\" (UID: \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\") " pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:48 crc kubenswrapper[4783]: I0930 14:02:48.989321 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-catalog-content\") pod \"community-operators-pxqjl\" (UID: \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\") " pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:49 crc kubenswrapper[4783]: I0930 14:02:49.090721 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-utilities\") pod \"community-operators-pxqjl\" (UID: \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\") " pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:49 crc kubenswrapper[4783]: I0930 14:02:49.091032 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn6qb\" (UniqueName: \"kubernetes.io/projected/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-kube-api-access-mn6qb\") pod \"community-operators-pxqjl\" (UID: \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\") " pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:49 crc kubenswrapper[4783]: I0930 14:02:49.091127 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-catalog-content\") pod \"community-operators-pxqjl\" (UID: \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\") " pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:49 crc kubenswrapper[4783]: I0930 14:02:49.091242 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-utilities\") pod \"community-operators-pxqjl\" (UID: \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\") " pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:49 crc kubenswrapper[4783]: I0930 14:02:49.092029 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-catalog-content\") pod \"community-operators-pxqjl\" (UID: \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\") " pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:49 crc kubenswrapper[4783]: I0930 14:02:49.113921 4783 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mn6qb\" (UniqueName: \"kubernetes.io/projected/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-kube-api-access-mn6qb\") pod \"community-operators-pxqjl\" (UID: \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\") " pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:49 crc kubenswrapper[4783]: I0930 14:02:49.259884 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:49 crc kubenswrapper[4783]: I0930 14:02:49.741182 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pxqjl"] Sep 30 14:02:50 crc kubenswrapper[4783]: I0930 14:02:50.262887 4783 generic.go:334] "Generic (PLEG): container finished" podID="d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" containerID="0de93101a6a2a47b82e41de2e7df7477eca329072cc4e6e0aa584f342cf44474" exitCode=0 Sep 30 14:02:50 crc kubenswrapper[4783]: I0930 14:02:50.262972 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pxqjl" event={"ID":"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8","Type":"ContainerDied","Data":"0de93101a6a2a47b82e41de2e7df7477eca329072cc4e6e0aa584f342cf44474"} Sep 30 14:02:50 crc kubenswrapper[4783]: I0930 14:02:50.263385 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pxqjl" event={"ID":"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8","Type":"ContainerStarted","Data":"28e03b2bdc8b0b4ce9a1500103e8cf58c5da70aee6b15fdb8ecbb43ac1bc76e2"} Sep 30 14:02:50 crc kubenswrapper[4783]: I0930 14:02:50.264892 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 14:02:51 crc kubenswrapper[4783]: I0930 14:02:51.129146 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mn69t"] Sep 30 14:02:51 crc kubenswrapper[4783]: I0930 14:02:51.131108 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:02:51 crc kubenswrapper[4783]: I0930 14:02:51.144002 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mn69t"] Sep 30 14:02:51 crc kubenswrapper[4783]: I0930 14:02:51.346270 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09f7403b-3813-4136-8a80-bf0f23ec4667-utilities\") pod \"redhat-marketplace-mn69t\" (UID: \"09f7403b-3813-4136-8a80-bf0f23ec4667\") " pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:02:51 crc kubenswrapper[4783]: I0930 14:02:51.346635 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09f7403b-3813-4136-8a80-bf0f23ec4667-catalog-content\") pod \"redhat-marketplace-mn69t\" (UID: \"09f7403b-3813-4136-8a80-bf0f23ec4667\") " pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:02:51 crc kubenswrapper[4783]: I0930 14:02:51.346729 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqrgj\" (UniqueName: \"kubernetes.io/projected/09f7403b-3813-4136-8a80-bf0f23ec4667-kube-api-access-dqrgj\") pod \"redhat-marketplace-mn69t\" (UID: \"09f7403b-3813-4136-8a80-bf0f23ec4667\") " pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:02:51 crc kubenswrapper[4783]: I0930 14:02:51.448016 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09f7403b-3813-4136-8a80-bf0f23ec4667-catalog-content\") pod \"redhat-marketplace-mn69t\" (UID: \"09f7403b-3813-4136-8a80-bf0f23ec4667\") " pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:02:51 crc kubenswrapper[4783]: I0930 14:02:51.448664 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09f7403b-3813-4136-8a80-bf0f23ec4667-catalog-content\") pod \"redhat-marketplace-mn69t\" (UID: \"09f7403b-3813-4136-8a80-bf0f23ec4667\") " pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:02:51 crc kubenswrapper[4783]: I0930 14:02:51.448823 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqrgj\" (UniqueName: \"kubernetes.io/projected/09f7403b-3813-4136-8a80-bf0f23ec4667-kube-api-access-dqrgj\") pod \"redhat-marketplace-mn69t\" (UID: \"09f7403b-3813-4136-8a80-bf0f23ec4667\") " pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:02:51 crc kubenswrapper[4783]: I0930 14:02:51.448905 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09f7403b-3813-4136-8a80-bf0f23ec4667-utilities\") pod \"redhat-marketplace-mn69t\" (UID: \"09f7403b-3813-4136-8a80-bf0f23ec4667\") " pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:02:51 crc kubenswrapper[4783]: I0930 14:02:51.449436 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09f7403b-3813-4136-8a80-bf0f23ec4667-utilities\") pod \"redhat-marketplace-mn69t\" (UID: \"09f7403b-3813-4136-8a80-bf0f23ec4667\") " pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:02:51 crc kubenswrapper[4783]: I0930 14:02:51.471439 4783 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-dqrgj\" (UniqueName: \"kubernetes.io/projected/09f7403b-3813-4136-8a80-bf0f23ec4667-kube-api-access-dqrgj\") pod \"redhat-marketplace-mn69t\" (UID: \"09f7403b-3813-4136-8a80-bf0f23ec4667\") " pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:02:51 crc kubenswrapper[4783]: I0930 14:02:51.653714 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:02:51 crc kubenswrapper[4783]: I0930 14:02:51.844257 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:02:51 crc kubenswrapper[4783]: E0930 14:02:51.844829 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:02:52 crc kubenswrapper[4783]: I0930 14:02:52.074191 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mn69t"] Sep 30 14:02:52 crc kubenswrapper[4783]: I0930 14:02:52.368352 4783 generic.go:334] "Generic (PLEG): container finished" podID="d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" containerID="37cfe560478a84daeb8718320aa7f4c86114ad3c37fa33968db9f54cd35d815b" exitCode=0 Sep 30 14:02:52 crc kubenswrapper[4783]: I0930 14:02:52.368424 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pxqjl" event={"ID":"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8","Type":"ContainerDied","Data":"37cfe560478a84daeb8718320aa7f4c86114ad3c37fa33968db9f54cd35d815b"} Sep 30 14:02:52 crc kubenswrapper[4783]: I0930 14:02:52.372965 4783 generic.go:334] "Generic (PLEG): container finished" podID="09f7403b-3813-4136-8a80-bf0f23ec4667" containerID="8a3e3f4823fa8d6d22adb20e493db5fc17e92f896b7213110297af67d861fe67" exitCode=0 Sep 30 14:02:52 crc kubenswrapper[4783]: I0930 14:02:52.373023 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mn69t" event={"ID":"09f7403b-3813-4136-8a80-bf0f23ec4667","Type":"ContainerDied","Data":"8a3e3f4823fa8d6d22adb20e493db5fc17e92f896b7213110297af67d861fe67"} Sep 30 14:02:52 crc kubenswrapper[4783]: I0930 14:02:52.373062 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mn69t" event={"ID":"09f7403b-3813-4136-8a80-bf0f23ec4667","Type":"ContainerStarted","Data":"acaab55c6276e28c1f0ba190e19247cad108ac4e959f1a7bd71194c9c3616cdf"} Sep 30 14:02:53 crc kubenswrapper[4783]: I0930 14:02:53.389545 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pxqjl" event={"ID":"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8","Type":"ContainerStarted","Data":"88bfc5e1e44d19d209eb182ed817395d1543c2c6a5b54b1c1a2c20f194af8c38"} Sep 30 14:02:53 crc kubenswrapper[4783]: I0930 14:02:53.412660 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pxqjl" podStartSLOduration=2.813879028 podStartE2EDuration="5.412642952s" podCreationTimestamp="2025-09-30 14:02:48 +0000 UTC" firstStartedPulling="2025-09-30 14:02:50.264692665 +0000 UTC m=+1670.196158972" lastFinishedPulling="2025-09-30 
14:02:52.863456589 +0000 UTC m=+1672.794922896" observedRunningTime="2025-09-30 14:02:53.408412297 +0000 UTC m=+1673.339878614" watchObservedRunningTime="2025-09-30 14:02:53.412642952 +0000 UTC m=+1673.344109259" Sep 30 14:02:54 crc kubenswrapper[4783]: I0930 14:02:54.399081 4783 generic.go:334] "Generic (PLEG): container finished" podID="09f7403b-3813-4136-8a80-bf0f23ec4667" containerID="7cf60d9c76e6c2ca54006fa01581920e704ff6068016c82e2d5745c8d65895a2" exitCode=0 Sep 30 14:02:54 crc kubenswrapper[4783]: I0930 14:02:54.399196 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mn69t" event={"ID":"09f7403b-3813-4136-8a80-bf0f23ec4667","Type":"ContainerDied","Data":"7cf60d9c76e6c2ca54006fa01581920e704ff6068016c82e2d5745c8d65895a2"} Sep 30 14:02:55 crc kubenswrapper[4783]: I0930 14:02:55.416157 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mn69t" event={"ID":"09f7403b-3813-4136-8a80-bf0f23ec4667","Type":"ContainerStarted","Data":"62af09c2180fed8013bb9d51ead6d6f6d4cd20f9fdef3e22f551531ccd2afb48"} Sep 30 14:02:55 crc kubenswrapper[4783]: I0930 14:02:55.447646 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mn69t" podStartSLOduration=1.868232694 podStartE2EDuration="4.447620602s" podCreationTimestamp="2025-09-30 14:02:51 +0000 UTC" firstStartedPulling="2025-09-30 14:02:52.375141508 +0000 UTC m=+1672.306607815" lastFinishedPulling="2025-09-30 14:02:54.954529406 +0000 UTC m=+1674.885995723" observedRunningTime="2025-09-30 14:02:55.437816996 +0000 UTC m=+1675.369283313" watchObservedRunningTime="2025-09-30 14:02:55.447620602 +0000 UTC m=+1675.379086949" Sep 30 14:02:59 crc kubenswrapper[4783]: I0930 14:02:59.260106 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:59 crc kubenswrapper[4783]: I0930 14:02:59.261468 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:59 crc kubenswrapper[4783]: I0930 14:02:59.324403 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:59 crc kubenswrapper[4783]: I0930 14:02:59.511245 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:02:59 crc kubenswrapper[4783]: I0930 14:02:59.718571 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pxqjl"] Sep 30 14:03:01 crc kubenswrapper[4783]: I0930 14:03:01.467560 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pxqjl" podUID="d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" containerName="registry-server" containerID="cri-o://88bfc5e1e44d19d209eb182ed817395d1543c2c6a5b54b1c1a2c20f194af8c38" gracePeriod=2 Sep 30 14:03:01 crc kubenswrapper[4783]: I0930 14:03:01.653967 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:03:01 crc kubenswrapper[4783]: I0930 14:03:01.654377 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:03:01 crc kubenswrapper[4783]: I0930 14:03:01.719786 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:03:01 crc kubenswrapper[4783]: I0930 14:03:01.848600 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.028260 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-utilities\") pod \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\" (UID: \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\") " Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.028381 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mn6qb\" (UniqueName: \"kubernetes.io/projected/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-kube-api-access-mn6qb\") pod \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\" (UID: \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\") " Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.028485 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-catalog-content\") pod \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\" (UID: \"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8\") " Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.030424 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-utilities" (OuterVolumeSpecName: "utilities") pod "d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" (UID: "d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.034905 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-kube-api-access-mn6qb" (OuterVolumeSpecName: "kube-api-access-mn6qb") pod "d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" (UID: "d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8"). InnerVolumeSpecName "kube-api-access-mn6qb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.098540 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" (UID: "d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.130858 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.130907 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.130919 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mn6qb\" (UniqueName: \"kubernetes.io/projected/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8-kube-api-access-mn6qb\") on node \"crc\" DevicePath \"\"" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.477612 4783 generic.go:334] "Generic (PLEG): container finished" podID="d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" containerID="88bfc5e1e44d19d209eb182ed817395d1543c2c6a5b54b1c1a2c20f194af8c38" exitCode=0 Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.477675 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pxqjl" event={"ID":"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8","Type":"ContainerDied","Data":"88bfc5e1e44d19d209eb182ed817395d1543c2c6a5b54b1c1a2c20f194af8c38"} Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.478823 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pxqjl" event={"ID":"d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8","Type":"ContainerDied","Data":"28e03b2bdc8b0b4ce9a1500103e8cf58c5da70aee6b15fdb8ecbb43ac1bc76e2"} Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.477752 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pxqjl" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.478869 4783 scope.go:117] "RemoveContainer" containerID="88bfc5e1e44d19d209eb182ed817395d1543c2c6a5b54b1c1a2c20f194af8c38" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.521862 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pxqjl"] Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.523931 4783 scope.go:117] "RemoveContainer" containerID="37cfe560478a84daeb8718320aa7f4c86114ad3c37fa33968db9f54cd35d815b" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.526799 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pxqjl"] Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.528145 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.552719 4783 scope.go:117] "RemoveContainer" containerID="0de93101a6a2a47b82e41de2e7df7477eca329072cc4e6e0aa584f342cf44474" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.570399 4783 scope.go:117] "RemoveContainer" containerID="88bfc5e1e44d19d209eb182ed817395d1543c2c6a5b54b1c1a2c20f194af8c38" Sep 30 14:03:02 crc kubenswrapper[4783]: E0930 14:03:02.571093 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88bfc5e1e44d19d209eb182ed817395d1543c2c6a5b54b1c1a2c20f194af8c38\": container with ID starting with 88bfc5e1e44d19d209eb182ed817395d1543c2c6a5b54b1c1a2c20f194af8c38 not found: ID does not exist" containerID="88bfc5e1e44d19d209eb182ed817395d1543c2c6a5b54b1c1a2c20f194af8c38" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.571173 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88bfc5e1e44d19d209eb182ed817395d1543c2c6a5b54b1c1a2c20f194af8c38"} err="failed to get container status \"88bfc5e1e44d19d209eb182ed817395d1543c2c6a5b54b1c1a2c20f194af8c38\": rpc error: code = NotFound desc = could not find container \"88bfc5e1e44d19d209eb182ed817395d1543c2c6a5b54b1c1a2c20f194af8c38\": container with ID starting with 88bfc5e1e44d19d209eb182ed817395d1543c2c6a5b54b1c1a2c20f194af8c38 not found: ID does not exist" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.571201 4783 scope.go:117] "RemoveContainer" containerID="37cfe560478a84daeb8718320aa7f4c86114ad3c37fa33968db9f54cd35d815b" Sep 30 14:03:02 crc kubenswrapper[4783]: E0930 14:03:02.571503 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37cfe560478a84daeb8718320aa7f4c86114ad3c37fa33968db9f54cd35d815b\": container with ID starting with 37cfe560478a84daeb8718320aa7f4c86114ad3c37fa33968db9f54cd35d815b not found: ID does not exist" containerID="37cfe560478a84daeb8718320aa7f4c86114ad3c37fa33968db9f54cd35d815b" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.571531 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37cfe560478a84daeb8718320aa7f4c86114ad3c37fa33968db9f54cd35d815b"} err="failed to get container status \"37cfe560478a84daeb8718320aa7f4c86114ad3c37fa33968db9f54cd35d815b\": rpc error: code = NotFound desc = could not find container \"37cfe560478a84daeb8718320aa7f4c86114ad3c37fa33968db9f54cd35d815b\": container with ID starting with 
37cfe560478a84daeb8718320aa7f4c86114ad3c37fa33968db9f54cd35d815b not found: ID does not exist" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.571551 4783 scope.go:117] "RemoveContainer" containerID="0de93101a6a2a47b82e41de2e7df7477eca329072cc4e6e0aa584f342cf44474" Sep 30 14:03:02 crc kubenswrapper[4783]: E0930 14:03:02.571835 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0de93101a6a2a47b82e41de2e7df7477eca329072cc4e6e0aa584f342cf44474\": container with ID starting with 0de93101a6a2a47b82e41de2e7df7477eca329072cc4e6e0aa584f342cf44474 not found: ID does not exist" containerID="0de93101a6a2a47b82e41de2e7df7477eca329072cc4e6e0aa584f342cf44474" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.571956 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0de93101a6a2a47b82e41de2e7df7477eca329072cc4e6e0aa584f342cf44474"} err="failed to get container status \"0de93101a6a2a47b82e41de2e7df7477eca329072cc4e6e0aa584f342cf44474\": rpc error: code = NotFound desc = could not find container \"0de93101a6a2a47b82e41de2e7df7477eca329072cc4e6e0aa584f342cf44474\": container with ID starting with 0de93101a6a2a47b82e41de2e7df7477eca329072cc4e6e0aa584f342cf44474 not found: ID does not exist" Sep 30 14:03:02 crc kubenswrapper[4783]: I0930 14:03:02.859398 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" path="/var/lib/kubelet/pods/d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8/volumes" Sep 30 14:03:04 crc kubenswrapper[4783]: I0930 14:03:04.843149 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:03:04 crc kubenswrapper[4783]: E0930 14:03:04.843613 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:03:04 crc kubenswrapper[4783]: I0930 14:03:04.917513 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mn69t"] Sep 30 14:03:04 crc kubenswrapper[4783]: I0930 14:03:04.917757 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mn69t" podUID="09f7403b-3813-4136-8a80-bf0f23ec4667" containerName="registry-server" containerID="cri-o://62af09c2180fed8013bb9d51ead6d6f6d4cd20f9fdef3e22f551531ccd2afb48" gracePeriod=2 Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.311102 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.388141 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqrgj\" (UniqueName: \"kubernetes.io/projected/09f7403b-3813-4136-8a80-bf0f23ec4667-kube-api-access-dqrgj\") pod \"09f7403b-3813-4136-8a80-bf0f23ec4667\" (UID: \"09f7403b-3813-4136-8a80-bf0f23ec4667\") " Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.388208 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09f7403b-3813-4136-8a80-bf0f23ec4667-utilities\") pod \"09f7403b-3813-4136-8a80-bf0f23ec4667\" (UID: \"09f7403b-3813-4136-8a80-bf0f23ec4667\") " Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.388246 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09f7403b-3813-4136-8a80-bf0f23ec4667-catalog-content\") pod \"09f7403b-3813-4136-8a80-bf0f23ec4667\" (UID: \"09f7403b-3813-4136-8a80-bf0f23ec4667\") " Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.389316 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09f7403b-3813-4136-8a80-bf0f23ec4667-utilities" (OuterVolumeSpecName: "utilities") pod "09f7403b-3813-4136-8a80-bf0f23ec4667" (UID: "09f7403b-3813-4136-8a80-bf0f23ec4667"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.394118 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09f7403b-3813-4136-8a80-bf0f23ec4667-kube-api-access-dqrgj" (OuterVolumeSpecName: "kube-api-access-dqrgj") pod "09f7403b-3813-4136-8a80-bf0f23ec4667" (UID: "09f7403b-3813-4136-8a80-bf0f23ec4667"). InnerVolumeSpecName "kube-api-access-dqrgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.401982 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09f7403b-3813-4136-8a80-bf0f23ec4667-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09f7403b-3813-4136-8a80-bf0f23ec4667" (UID: "09f7403b-3813-4136-8a80-bf0f23ec4667"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.490286 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqrgj\" (UniqueName: \"kubernetes.io/projected/09f7403b-3813-4136-8a80-bf0f23ec4667-kube-api-access-dqrgj\") on node \"crc\" DevicePath \"\"" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.490325 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09f7403b-3813-4136-8a80-bf0f23ec4667-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.490335 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09f7403b-3813-4136-8a80-bf0f23ec4667-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.506190 4783 generic.go:334] "Generic (PLEG): container finished" podID="09f7403b-3813-4136-8a80-bf0f23ec4667" containerID="62af09c2180fed8013bb9d51ead6d6f6d4cd20f9fdef3e22f551531ccd2afb48" exitCode=0 Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.506274 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mn69t" event={"ID":"09f7403b-3813-4136-8a80-bf0f23ec4667","Type":"ContainerDied","Data":"62af09c2180fed8013bb9d51ead6d6f6d4cd20f9fdef3e22f551531ccd2afb48"} Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.506310 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mn69t" event={"ID":"09f7403b-3813-4136-8a80-bf0f23ec4667","Type":"ContainerDied","Data":"acaab55c6276e28c1f0ba190e19247cad108ac4e959f1a7bd71194c9c3616cdf"} Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.506311 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mn69t" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.506328 4783 scope.go:117] "RemoveContainer" containerID="62af09c2180fed8013bb9d51ead6d6f6d4cd20f9fdef3e22f551531ccd2afb48" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.540196 4783 scope.go:117] "RemoveContainer" containerID="7cf60d9c76e6c2ca54006fa01581920e704ff6068016c82e2d5745c8d65895a2" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.541449 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mn69t"] Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.547460 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mn69t"] Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.570750 4783 scope.go:117] "RemoveContainer" containerID="8a3e3f4823fa8d6d22adb20e493db5fc17e92f896b7213110297af67d861fe67" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.586509 4783 scope.go:117] "RemoveContainer" containerID="62af09c2180fed8013bb9d51ead6d6f6d4cd20f9fdef3e22f551531ccd2afb48" Sep 30 14:03:05 crc kubenswrapper[4783]: E0930 14:03:05.586894 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62af09c2180fed8013bb9d51ead6d6f6d4cd20f9fdef3e22f551531ccd2afb48\": container with ID starting with 62af09c2180fed8013bb9d51ead6d6f6d4cd20f9fdef3e22f551531ccd2afb48 not found: ID does not exist" containerID="62af09c2180fed8013bb9d51ead6d6f6d4cd20f9fdef3e22f551531ccd2afb48" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.586943 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62af09c2180fed8013bb9d51ead6d6f6d4cd20f9fdef3e22f551531ccd2afb48"} err="failed to get container status \"62af09c2180fed8013bb9d51ead6d6f6d4cd20f9fdef3e22f551531ccd2afb48\": rpc error: code = NotFound desc = could not find container \"62af09c2180fed8013bb9d51ead6d6f6d4cd20f9fdef3e22f551531ccd2afb48\": container with ID starting with 62af09c2180fed8013bb9d51ead6d6f6d4cd20f9fdef3e22f551531ccd2afb48 not found: ID does not exist" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.586971 4783 scope.go:117] "RemoveContainer" containerID="7cf60d9c76e6c2ca54006fa01581920e704ff6068016c82e2d5745c8d65895a2" Sep 30 14:03:05 crc kubenswrapper[4783]: E0930 14:03:05.587265 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cf60d9c76e6c2ca54006fa01581920e704ff6068016c82e2d5745c8d65895a2\": container with ID starting with 7cf60d9c76e6c2ca54006fa01581920e704ff6068016c82e2d5745c8d65895a2 not found: ID does not exist" containerID="7cf60d9c76e6c2ca54006fa01581920e704ff6068016c82e2d5745c8d65895a2" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.587281 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cf60d9c76e6c2ca54006fa01581920e704ff6068016c82e2d5745c8d65895a2"} err="failed to get container status \"7cf60d9c76e6c2ca54006fa01581920e704ff6068016c82e2d5745c8d65895a2\": rpc error: code = NotFound desc = could not find container \"7cf60d9c76e6c2ca54006fa01581920e704ff6068016c82e2d5745c8d65895a2\": container with ID starting with 7cf60d9c76e6c2ca54006fa01581920e704ff6068016c82e2d5745c8d65895a2 not found: ID does not exist" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.587293 4783 scope.go:117] "RemoveContainer" 
containerID="8a3e3f4823fa8d6d22adb20e493db5fc17e92f896b7213110297af67d861fe67" Sep 30 14:03:05 crc kubenswrapper[4783]: E0930 14:03:05.587489 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a3e3f4823fa8d6d22adb20e493db5fc17e92f896b7213110297af67d861fe67\": container with ID starting with 8a3e3f4823fa8d6d22adb20e493db5fc17e92f896b7213110297af67d861fe67 not found: ID does not exist" containerID="8a3e3f4823fa8d6d22adb20e493db5fc17e92f896b7213110297af67d861fe67" Sep 30 14:03:05 crc kubenswrapper[4783]: I0930 14:03:05.587520 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a3e3f4823fa8d6d22adb20e493db5fc17e92f896b7213110297af67d861fe67"} err="failed to get container status \"8a3e3f4823fa8d6d22adb20e493db5fc17e92f896b7213110297af67d861fe67\": rpc error: code = NotFound desc = could not find container \"8a3e3f4823fa8d6d22adb20e493db5fc17e92f896b7213110297af67d861fe67\": container with ID starting with 8a3e3f4823fa8d6d22adb20e493db5fc17e92f896b7213110297af67d861fe67 not found: ID does not exist" Sep 30 14:03:06 crc kubenswrapper[4783]: I0930 14:03:06.863385 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09f7403b-3813-4136-8a80-bf0f23ec4667" path="/var/lib/kubelet/pods/09f7403b-3813-4136-8a80-bf0f23ec4667/volumes" Sep 30 14:03:16 crc kubenswrapper[4783]: I0930 14:03:16.965333 4783 scope.go:117] "RemoveContainer" containerID="d40de3bbab8c0ae54ee3ede870b97a3dc0b58b8f8e8ce00329d52caf9cd21044" Sep 30 14:03:17 crc kubenswrapper[4783]: I0930 14:03:17.026838 4783 scope.go:117] "RemoveContainer" containerID="73cf2228317a8d20ed34573779cec6df5113380b476db226cf7b16fb89423412" Sep 30 14:03:17 crc kubenswrapper[4783]: I0930 14:03:17.047826 4783 scope.go:117] "RemoveContainer" containerID="e6fddfa8b81d2a83d3447008627b220e5a1a82c63b8656f14d2cbc72ff3c8250" Sep 30 14:03:17 crc kubenswrapper[4783]: I0930 14:03:17.071268 4783 scope.go:117] "RemoveContainer" containerID="1a2d3e8f3026d5fdf54bd68bc0e37a6babc472452cfbc73d9d47792a666e28be" Sep 30 14:03:17 crc kubenswrapper[4783]: I0930 14:03:17.092322 4783 scope.go:117] "RemoveContainer" containerID="1add237bfa70eb79e74526b5f2695b642b54a5e84d25e87dae1aef8e46b7ab79" Sep 30 14:03:19 crc kubenswrapper[4783]: I0930 14:03:19.842944 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:03:19 crc kubenswrapper[4783]: E0930 14:03:19.843247 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:03:34 crc kubenswrapper[4783]: I0930 14:03:34.842674 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:03:34 crc kubenswrapper[4783]: E0930 14:03:34.843422 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:03:48 crc kubenswrapper[4783]: I0930 14:03:48.843896 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:03:48 crc kubenswrapper[4783]: E0930 14:03:48.844665 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:04:01 crc kubenswrapper[4783]: I0930 14:04:01.843538 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:04:01 crc kubenswrapper[4783]: E0930 14:04:01.844243 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:04:15 crc kubenswrapper[4783]: I0930 14:04:15.843778 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:04:15 crc kubenswrapper[4783]: E0930 14:04:15.845022 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:04:17 crc kubenswrapper[4783]: I0930 14:04:17.208800 4783 scope.go:117] "RemoveContainer" containerID="ab6864ab16c498e4305ddbe1aa4bbbb37acd994f33c75be907ff187ab43dcdff" Sep 30 14:04:17 crc kubenswrapper[4783]: I0930 14:04:17.242547 4783 scope.go:117] "RemoveContainer" containerID="c0370e9e7d864c2c9f1bb02da815cdefb288c220f43bfd64692abc5efb6adec5" Sep 30 14:04:17 crc kubenswrapper[4783]: I0930 14:04:17.270642 4783 scope.go:117] "RemoveContainer" containerID="9b70fe082872f9b22769a85eabc84223b0211c0b3c90647cad99c0c149caec1e" Sep 30 14:04:30 crc kubenswrapper[4783]: I0930 14:04:30.851284 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:04:30 crc kubenswrapper[4783]: E0930 14:04:30.852054 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:04:44 crc kubenswrapper[4783]: I0930 14:04:44.843762 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:04:44 crc 
kubenswrapper[4783]: E0930 14:04:44.844766 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:04:56 crc kubenswrapper[4783]: I0930 14:04:56.843035 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:04:56 crc kubenswrapper[4783]: E0930 14:04:56.843871 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:05:09 crc kubenswrapper[4783]: I0930 14:05:09.844157 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:05:09 crc kubenswrapper[4783]: E0930 14:05:09.845616 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:05:17 crc kubenswrapper[4783]: I0930 14:05:17.369752 4783 scope.go:117] "RemoveContainer" containerID="5583f7bb77f96137dbfbbe3a2cfb849fa76f09f327e0249ebad7f72e4e2cb2d5" Sep 30 14:05:20 crc kubenswrapper[4783]: I0930 14:05:20.849337 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:05:20 crc kubenswrapper[4783]: E0930 14:05:20.850516 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:05:34 crc kubenswrapper[4783]: I0930 14:05:34.843282 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:05:34 crc kubenswrapper[4783]: E0930 14:05:34.844271 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:05:46 crc kubenswrapper[4783]: I0930 14:05:46.844459 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:05:46 crc 
kubenswrapper[4783]: E0930 14:05:46.845375 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:05:58 crc kubenswrapper[4783]: I0930 14:05:58.844491 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:05:58 crc kubenswrapper[4783]: E0930 14:05:58.845660 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:06:12 crc kubenswrapper[4783]: I0930 14:06:12.842937 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:06:12 crc kubenswrapper[4783]: E0930 14:06:12.843642 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:06:25 crc kubenswrapper[4783]: I0930 14:06:25.843093 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:06:25 crc kubenswrapper[4783]: E0930 14:06:25.843826 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:06:38 crc kubenswrapper[4783]: I0930 14:06:38.843457 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:06:38 crc kubenswrapper[4783]: E0930 14:06:38.844115 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:06:51 crc kubenswrapper[4783]: I0930 14:06:51.842922 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:06:51 crc kubenswrapper[4783]: E0930 14:06:51.845035 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:07:04 crc kubenswrapper[4783]: I0930 14:07:04.843355 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:07:04 crc kubenswrapper[4783]: E0930 14:07:04.844427 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:07:15 crc kubenswrapper[4783]: I0930 14:07:15.843237 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740" Sep 30 14:07:16 crc kubenswrapper[4783]: I0930 14:07:16.535193 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"d556784dae98e8b58aa7de19fbab2ea6b36f26979cf5fba2f7252409875e121b"} Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.076952 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cfjzf"] Sep 30 14:07:54 crc kubenswrapper[4783]: E0930 14:07:54.077907 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" containerName="registry-server" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.077925 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" containerName="registry-server" Sep 30 14:07:54 crc kubenswrapper[4783]: E0930 14:07:54.077940 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09f7403b-3813-4136-8a80-bf0f23ec4667" containerName="extract-content" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.077949 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="09f7403b-3813-4136-8a80-bf0f23ec4667" containerName="extract-content" Sep 30 14:07:54 crc kubenswrapper[4783]: E0930 14:07:54.077966 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" containerName="extract-content" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.077976 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" containerName="extract-content" Sep 30 14:07:54 crc kubenswrapper[4783]: E0930 14:07:54.077987 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" containerName="extract-utilities" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.077995 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" containerName="extract-utilities" Sep 30 14:07:54 crc kubenswrapper[4783]: E0930 14:07:54.078015 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09f7403b-3813-4136-8a80-bf0f23ec4667" containerName="registry-server" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.078024 4783 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="09f7403b-3813-4136-8a80-bf0f23ec4667" containerName="registry-server" Sep 30 14:07:54 crc kubenswrapper[4783]: E0930 14:07:54.078045 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09f7403b-3813-4136-8a80-bf0f23ec4667" containerName="extract-utilities" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.078054 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="09f7403b-3813-4136-8a80-bf0f23ec4667" containerName="extract-utilities" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.078254 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="09f7403b-3813-4136-8a80-bf0f23ec4667" containerName="registry-server" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.078282 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4ca43b2-44ae-4ca7-8d8e-753ac0e92be8" containerName="registry-server" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.079578 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.100567 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cfjzf"] Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.239417 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpqr9\" (UniqueName: \"kubernetes.io/projected/89af1656-08a1-43dc-8b27-54064109f752-kube-api-access-lpqr9\") pod \"redhat-operators-cfjzf\" (UID: \"89af1656-08a1-43dc-8b27-54064109f752\") " pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.239560 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89af1656-08a1-43dc-8b27-54064109f752-catalog-content\") pod \"redhat-operators-cfjzf\" (UID: \"89af1656-08a1-43dc-8b27-54064109f752\") " pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.239637 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89af1656-08a1-43dc-8b27-54064109f752-utilities\") pod \"redhat-operators-cfjzf\" (UID: \"89af1656-08a1-43dc-8b27-54064109f752\") " pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.340656 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89af1656-08a1-43dc-8b27-54064109f752-catalog-content\") pod \"redhat-operators-cfjzf\" (UID: \"89af1656-08a1-43dc-8b27-54064109f752\") " pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.340772 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89af1656-08a1-43dc-8b27-54064109f752-utilities\") pod \"redhat-operators-cfjzf\" (UID: \"89af1656-08a1-43dc-8b27-54064109f752\") " pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.340806 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpqr9\" (UniqueName: \"kubernetes.io/projected/89af1656-08a1-43dc-8b27-54064109f752-kube-api-access-lpqr9\") pod \"redhat-operators-cfjzf\" (UID: 
\"89af1656-08a1-43dc-8b27-54064109f752\") " pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.341409 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89af1656-08a1-43dc-8b27-54064109f752-utilities\") pod \"redhat-operators-cfjzf\" (UID: \"89af1656-08a1-43dc-8b27-54064109f752\") " pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.341492 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89af1656-08a1-43dc-8b27-54064109f752-catalog-content\") pod \"redhat-operators-cfjzf\" (UID: \"89af1656-08a1-43dc-8b27-54064109f752\") " pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.361934 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpqr9\" (UniqueName: \"kubernetes.io/projected/89af1656-08a1-43dc-8b27-54064109f752-kube-api-access-lpqr9\") pod \"redhat-operators-cfjzf\" (UID: \"89af1656-08a1-43dc-8b27-54064109f752\") " pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.406085 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.642738 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cfjzf"] Sep 30 14:07:54 crc kubenswrapper[4783]: I0930 14:07:54.851867 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfjzf" event={"ID":"89af1656-08a1-43dc-8b27-54064109f752","Type":"ContainerStarted","Data":"33ba1732fd85a7bdb35eeba89bf23bbf21921fc0a784e6b8325ba1fa4099757a"} Sep 30 14:07:55 crc kubenswrapper[4783]: I0930 14:07:55.858994 4783 generic.go:334] "Generic (PLEG): container finished" podID="89af1656-08a1-43dc-8b27-54064109f752" containerID="bb58a98fc2b209b3b884437fe51d2bf880d8fc0f31576002267ba784f9b6a5a3" exitCode=0 Sep 30 14:07:55 crc kubenswrapper[4783]: I0930 14:07:55.859063 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfjzf" event={"ID":"89af1656-08a1-43dc-8b27-54064109f752","Type":"ContainerDied","Data":"bb58a98fc2b209b3b884437fe51d2bf880d8fc0f31576002267ba784f9b6a5a3"} Sep 30 14:07:55 crc kubenswrapper[4783]: I0930 14:07:55.862860 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 14:07:56 crc kubenswrapper[4783]: I0930 14:07:56.870040 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfjzf" event={"ID":"89af1656-08a1-43dc-8b27-54064109f752","Type":"ContainerStarted","Data":"b1f494063df568901bc13a612ae3f4fea9776c8c5965964b376c246e5b1d4e93"} Sep 30 14:07:57 crc kubenswrapper[4783]: I0930 14:07:57.879825 4783 generic.go:334] "Generic (PLEG): container finished" podID="89af1656-08a1-43dc-8b27-54064109f752" containerID="b1f494063df568901bc13a612ae3f4fea9776c8c5965964b376c246e5b1d4e93" exitCode=0 Sep 30 14:07:57 crc kubenswrapper[4783]: I0930 14:07:57.879870 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfjzf" 
event={"ID":"89af1656-08a1-43dc-8b27-54064109f752","Type":"ContainerDied","Data":"b1f494063df568901bc13a612ae3f4fea9776c8c5965964b376c246e5b1d4e93"} Sep 30 14:07:58 crc kubenswrapper[4783]: I0930 14:07:58.888830 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfjzf" event={"ID":"89af1656-08a1-43dc-8b27-54064109f752","Type":"ContainerStarted","Data":"da7d7e55238352bc629ced8f7902a3b7f6d3702f0015657e8b9b5454a9dd89d3"} Sep 30 14:07:58 crc kubenswrapper[4783]: I0930 14:07:58.912549 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cfjzf" podStartSLOduration=2.4311966910000002 podStartE2EDuration="4.912528726s" podCreationTimestamp="2025-09-30 14:07:54 +0000 UTC" firstStartedPulling="2025-09-30 14:07:55.862419037 +0000 UTC m=+1975.793885364" lastFinishedPulling="2025-09-30 14:07:58.343751052 +0000 UTC m=+1978.275217399" observedRunningTime="2025-09-30 14:07:58.906269846 +0000 UTC m=+1978.837736173" watchObservedRunningTime="2025-09-30 14:07:58.912528726 +0000 UTC m=+1978.843995053" Sep 30 14:08:04 crc kubenswrapper[4783]: I0930 14:08:04.407136 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:08:04 crc kubenswrapper[4783]: I0930 14:08:04.407781 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:08:04 crc kubenswrapper[4783]: I0930 14:08:04.460920 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:08:04 crc kubenswrapper[4783]: I0930 14:08:04.970599 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:08:07 crc kubenswrapper[4783]: I0930 14:08:07.269760 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cfjzf"] Sep 30 14:08:07 crc kubenswrapper[4783]: I0930 14:08:07.270166 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cfjzf" podUID="89af1656-08a1-43dc-8b27-54064109f752" containerName="registry-server" containerID="cri-o://da7d7e55238352bc629ced8f7902a3b7f6d3702f0015657e8b9b5454a9dd89d3" gracePeriod=2 Sep 30 14:08:07 crc kubenswrapper[4783]: I0930 14:08:07.953110 4783 generic.go:334] "Generic (PLEG): container finished" podID="89af1656-08a1-43dc-8b27-54064109f752" containerID="da7d7e55238352bc629ced8f7902a3b7f6d3702f0015657e8b9b5454a9dd89d3" exitCode=0 Sep 30 14:08:07 crc kubenswrapper[4783]: I0930 14:08:07.953186 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfjzf" event={"ID":"89af1656-08a1-43dc-8b27-54064109f752","Type":"ContainerDied","Data":"da7d7e55238352bc629ced8f7902a3b7f6d3702f0015657e8b9b5454a9dd89d3"} Sep 30 14:08:08 crc kubenswrapper[4783]: I0930 14:08:08.184982 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:08:08 crc kubenswrapper[4783]: I0930 14:08:08.357667 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89af1656-08a1-43dc-8b27-54064109f752-utilities\") pod \"89af1656-08a1-43dc-8b27-54064109f752\" (UID: \"89af1656-08a1-43dc-8b27-54064109f752\") " Sep 30 14:08:08 crc kubenswrapper[4783]: I0930 14:08:08.357840 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lpqr9\" (UniqueName: \"kubernetes.io/projected/89af1656-08a1-43dc-8b27-54064109f752-kube-api-access-lpqr9\") pod \"89af1656-08a1-43dc-8b27-54064109f752\" (UID: \"89af1656-08a1-43dc-8b27-54064109f752\") " Sep 30 14:08:08 crc kubenswrapper[4783]: I0930 14:08:08.357883 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89af1656-08a1-43dc-8b27-54064109f752-catalog-content\") pod \"89af1656-08a1-43dc-8b27-54064109f752\" (UID: \"89af1656-08a1-43dc-8b27-54064109f752\") " Sep 30 14:08:08 crc kubenswrapper[4783]: I0930 14:08:08.359322 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89af1656-08a1-43dc-8b27-54064109f752-utilities" (OuterVolumeSpecName: "utilities") pod "89af1656-08a1-43dc-8b27-54064109f752" (UID: "89af1656-08a1-43dc-8b27-54064109f752"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:08:08 crc kubenswrapper[4783]: I0930 14:08:08.364262 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89af1656-08a1-43dc-8b27-54064109f752-kube-api-access-lpqr9" (OuterVolumeSpecName: "kube-api-access-lpqr9") pod "89af1656-08a1-43dc-8b27-54064109f752" (UID: "89af1656-08a1-43dc-8b27-54064109f752"). InnerVolumeSpecName "kube-api-access-lpqr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:08:08 crc kubenswrapper[4783]: I0930 14:08:08.456722 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89af1656-08a1-43dc-8b27-54064109f752-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "89af1656-08a1-43dc-8b27-54064109f752" (UID: "89af1656-08a1-43dc-8b27-54064109f752"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:08:08 crc kubenswrapper[4783]: I0930 14:08:08.459296 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89af1656-08a1-43dc-8b27-54064109f752-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:08:08 crc kubenswrapper[4783]: I0930 14:08:08.459339 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lpqr9\" (UniqueName: \"kubernetes.io/projected/89af1656-08a1-43dc-8b27-54064109f752-kube-api-access-lpqr9\") on node \"crc\" DevicePath \"\"" Sep 30 14:08:08 crc kubenswrapper[4783]: I0930 14:08:08.459357 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89af1656-08a1-43dc-8b27-54064109f752-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:08:08 crc kubenswrapper[4783]: I0930 14:08:08.966053 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfjzf" event={"ID":"89af1656-08a1-43dc-8b27-54064109f752","Type":"ContainerDied","Data":"33ba1732fd85a7bdb35eeba89bf23bbf21921fc0a784e6b8325ba1fa4099757a"} Sep 30 14:08:08 crc kubenswrapper[4783]: I0930 14:08:08.966093 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cfjzf" Sep 30 14:08:08 crc kubenswrapper[4783]: I0930 14:08:08.966150 4783 scope.go:117] "RemoveContainer" containerID="da7d7e55238352bc629ced8f7902a3b7f6d3702f0015657e8b9b5454a9dd89d3" Sep 30 14:08:08 crc kubenswrapper[4783]: I0930 14:08:08.999067 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cfjzf"] Sep 30 14:08:09 crc kubenswrapper[4783]: I0930 14:08:09.008585 4783 scope.go:117] "RemoveContainer" containerID="b1f494063df568901bc13a612ae3f4fea9776c8c5965964b376c246e5b1d4e93" Sep 30 14:08:09 crc kubenswrapper[4783]: I0930 14:08:09.012476 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cfjzf"] Sep 30 14:08:09 crc kubenswrapper[4783]: I0930 14:08:09.033366 4783 scope.go:117] "RemoveContainer" containerID="bb58a98fc2b209b3b884437fe51d2bf880d8fc0f31576002267ba784f9b6a5a3" Sep 30 14:08:10 crc kubenswrapper[4783]: I0930 14:08:10.870056 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89af1656-08a1-43dc-8b27-54064109f752" path="/var/lib/kubelet/pods/89af1656-08a1-43dc-8b27-54064109f752/volumes" Sep 30 14:09:37 crc kubenswrapper[4783]: I0930 14:09:37.673819 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:09:37 crc kubenswrapper[4783]: I0930 14:09:37.674475 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:10:07 crc kubenswrapper[4783]: I0930 14:10:07.673952 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 14:10:07 crc kubenswrapper[4783]: I0930 14:10:07.675436 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.357776 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5ppl2"]
Sep 30 14:10:14 crc kubenswrapper[4783]: E0930 14:10:14.358665 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89af1656-08a1-43dc-8b27-54064109f752" containerName="registry-server"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.358682 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="89af1656-08a1-43dc-8b27-54064109f752" containerName="registry-server"
Sep 30 14:10:14 crc kubenswrapper[4783]: E0930 14:10:14.358702 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89af1656-08a1-43dc-8b27-54064109f752" containerName="extract-content"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.358711 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="89af1656-08a1-43dc-8b27-54064109f752" containerName="extract-content"
Sep 30 14:10:14 crc kubenswrapper[4783]: E0930 14:10:14.358728 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89af1656-08a1-43dc-8b27-54064109f752" containerName="extract-utilities"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.358735 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="89af1656-08a1-43dc-8b27-54064109f752" containerName="extract-utilities"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.358913 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="89af1656-08a1-43dc-8b27-54064109f752" containerName="registry-server"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.360199 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.373846 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5ppl2"]
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.393833 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d696r\" (UniqueName: \"kubernetes.io/projected/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-kube-api-access-d696r\") pod \"certified-operators-5ppl2\" (UID: \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\") " pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.393914 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-utilities\") pod \"certified-operators-5ppl2\" (UID: \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\") " pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.393940 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-catalog-content\") pod \"certified-operators-5ppl2\" (UID: \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\") " pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.494768 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d696r\" (UniqueName: \"kubernetes.io/projected/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-kube-api-access-d696r\") pod \"certified-operators-5ppl2\" (UID: \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\") " pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.494851 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-utilities\") pod \"certified-operators-5ppl2\" (UID: \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\") " pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.494875 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-catalog-content\") pod \"certified-operators-5ppl2\" (UID: \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\") " pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.495469 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-catalog-content\") pod \"certified-operators-5ppl2\" (UID: \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\") " pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.496431 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-utilities\") pod \"certified-operators-5ppl2\" (UID: \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\") " pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.516441 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d696r\" (UniqueName: \"kubernetes.io/projected/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-kube-api-access-d696r\") pod \"certified-operators-5ppl2\" (UID: \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\") " pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.680920 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:14 crc kubenswrapper[4783]: I0930 14:10:14.995599 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5ppl2"]
Sep 30 14:10:15 crc kubenswrapper[4783]: I0930 14:10:15.053123 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ppl2" event={"ID":"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3","Type":"ContainerStarted","Data":"d3c976931422ea4c810ce5645189b43c3af704984abdf1c0a4d194a0c19c3656"}
Sep 30 14:10:16 crc kubenswrapper[4783]: I0930 14:10:16.064182 4783 generic.go:334] "Generic (PLEG): container finished" podID="df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" containerID="d390928f0527f63ee3f4620a2c79ecdc24956e722647d7509b2e1a6d44922213" exitCode=0
Sep 30 14:10:16 crc kubenswrapper[4783]: I0930 14:10:16.064280 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ppl2" event={"ID":"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3","Type":"ContainerDied","Data":"d390928f0527f63ee3f4620a2c79ecdc24956e722647d7509b2e1a6d44922213"}
Sep 30 14:10:17 crc kubenswrapper[4783]: I0930 14:10:17.072391 4783 generic.go:334] "Generic (PLEG): container finished" podID="df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" containerID="e8d5a487457634c05ecce2f7de0a815a1e69a777551955059a68a6df87b28eba" exitCode=0
Sep 30 14:10:17 crc kubenswrapper[4783]: I0930 14:10:17.072494 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ppl2" event={"ID":"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3","Type":"ContainerDied","Data":"e8d5a487457634c05ecce2f7de0a815a1e69a777551955059a68a6df87b28eba"}
Sep 30 14:10:18 crc kubenswrapper[4783]: I0930 14:10:18.083482 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ppl2" event={"ID":"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3","Type":"ContainerStarted","Data":"b0caf0a2b7aa1a80492b1d10124905dbbcff11b525381be77a36f2580493bae9"}
Sep 30 14:10:18 crc kubenswrapper[4783]: I0930 14:10:18.110742 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5ppl2" podStartSLOduration=2.638596824 podStartE2EDuration="4.110722835s" podCreationTimestamp="2025-09-30 14:10:14 +0000 UTC" firstStartedPulling="2025-09-30 14:10:16.066985455 +0000 UTC m=+2115.998451802" lastFinishedPulling="2025-09-30 14:10:17.539111486 +0000 UTC m=+2117.470577813" observedRunningTime="2025-09-30 14:10:18.105360307 +0000 UTC m=+2118.036826624" watchObservedRunningTime="2025-09-30 14:10:18.110722835 +0000 UTC m=+2118.042189142"
Sep 30 14:10:24 crc kubenswrapper[4783]: I0930 14:10:24.682772 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:24 crc kubenswrapper[4783]: I0930 14:10:24.683185 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:24 crc kubenswrapper[4783]: I0930 14:10:24.730797 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:25 crc kubenswrapper[4783]: I0930 14:10:25.200568 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:25 crc kubenswrapper[4783]: I0930 14:10:25.244923 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5ppl2"]
Sep 30 14:10:27 crc kubenswrapper[4783]: I0930 14:10:27.150862 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5ppl2" podUID="df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" containerName="registry-server" containerID="cri-o://b0caf0a2b7aa1a80492b1d10124905dbbcff11b525381be77a36f2580493bae9" gracePeriod=2
Sep 30 14:10:27 crc kubenswrapper[4783]: I0930 14:10:27.546699 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:27 crc kubenswrapper[4783]: I0930 14:10:27.681910 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-utilities\") pod \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\" (UID: \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\") "
Sep 30 14:10:27 crc kubenswrapper[4783]: I0930 14:10:27.682029 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d696r\" (UniqueName: \"kubernetes.io/projected/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-kube-api-access-d696r\") pod \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\" (UID: \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\") "
Sep 30 14:10:27 crc kubenswrapper[4783]: I0930 14:10:27.682173 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-catalog-content\") pod \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\" (UID: \"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3\") "
Sep 30 14:10:27 crc kubenswrapper[4783]: I0930 14:10:27.682836 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-utilities" (OuterVolumeSpecName: "utilities") pod "df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" (UID: "df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 14:10:27 crc kubenswrapper[4783]: I0930 14:10:27.688286 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-kube-api-access-d696r" (OuterVolumeSpecName: "kube-api-access-d696r") pod "df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" (UID: "df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3"). InnerVolumeSpecName "kube-api-access-d696r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 14:10:27 crc kubenswrapper[4783]: I0930 14:10:27.752089 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" (UID: "df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 14:10:27 crc kubenswrapper[4783]: I0930 14:10:27.783662 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 14:10:27 crc kubenswrapper[4783]: I0930 14:10:27.783708 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d696r\" (UniqueName: \"kubernetes.io/projected/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-kube-api-access-d696r\") on node \"crc\" DevicePath \"\""
Sep 30 14:10:27 crc kubenswrapper[4783]: I0930 14:10:27.783718 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.160852 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ppl2" event={"ID":"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3","Type":"ContainerDied","Data":"b0caf0a2b7aa1a80492b1d10124905dbbcff11b525381be77a36f2580493bae9"}
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.160898 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5ppl2"
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.160820 4783 generic.go:334] "Generic (PLEG): container finished" podID="df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" containerID="b0caf0a2b7aa1a80492b1d10124905dbbcff11b525381be77a36f2580493bae9" exitCode=0
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.160953 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ppl2" event={"ID":"df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3","Type":"ContainerDied","Data":"d3c976931422ea4c810ce5645189b43c3af704984abdf1c0a4d194a0c19c3656"}
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.160924 4783 scope.go:117] "RemoveContainer" containerID="b0caf0a2b7aa1a80492b1d10124905dbbcff11b525381be77a36f2580493bae9"
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.193685 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5ppl2"]
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.194879 4783 scope.go:117] "RemoveContainer" containerID="e8d5a487457634c05ecce2f7de0a815a1e69a777551955059a68a6df87b28eba"
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.199116 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5ppl2"]
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.214537 4783 scope.go:117] "RemoveContainer" containerID="d390928f0527f63ee3f4620a2c79ecdc24956e722647d7509b2e1a6d44922213"
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.247983 4783 scope.go:117] "RemoveContainer" containerID="b0caf0a2b7aa1a80492b1d10124905dbbcff11b525381be77a36f2580493bae9"
Sep 30 14:10:28 crc kubenswrapper[4783]: E0930 14:10:28.248560 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0caf0a2b7aa1a80492b1d10124905dbbcff11b525381be77a36f2580493bae9\": container with ID starting with b0caf0a2b7aa1a80492b1d10124905dbbcff11b525381be77a36f2580493bae9 not found: ID does not exist" containerID="b0caf0a2b7aa1a80492b1d10124905dbbcff11b525381be77a36f2580493bae9"
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.248614 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0caf0a2b7aa1a80492b1d10124905dbbcff11b525381be77a36f2580493bae9"} err="failed to get container status \"b0caf0a2b7aa1a80492b1d10124905dbbcff11b525381be77a36f2580493bae9\": rpc error: code = NotFound desc = could not find container \"b0caf0a2b7aa1a80492b1d10124905dbbcff11b525381be77a36f2580493bae9\": container with ID starting with b0caf0a2b7aa1a80492b1d10124905dbbcff11b525381be77a36f2580493bae9 not found: ID does not exist"
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.248644 4783 scope.go:117] "RemoveContainer" containerID="e8d5a487457634c05ecce2f7de0a815a1e69a777551955059a68a6df87b28eba"
Sep 30 14:10:28 crc kubenswrapper[4783]: E0930 14:10:28.249126 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8d5a487457634c05ecce2f7de0a815a1e69a777551955059a68a6df87b28eba\": container with ID starting with e8d5a487457634c05ecce2f7de0a815a1e69a777551955059a68a6df87b28eba not found: ID does not exist" containerID="e8d5a487457634c05ecce2f7de0a815a1e69a777551955059a68a6df87b28eba"
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.249164 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8d5a487457634c05ecce2f7de0a815a1e69a777551955059a68a6df87b28eba"} err="failed to get container status \"e8d5a487457634c05ecce2f7de0a815a1e69a777551955059a68a6df87b28eba\": rpc error: code = NotFound desc = could not find container \"e8d5a487457634c05ecce2f7de0a815a1e69a777551955059a68a6df87b28eba\": container with ID starting with e8d5a487457634c05ecce2f7de0a815a1e69a777551955059a68a6df87b28eba not found: ID does not exist"
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.249189 4783 scope.go:117] "RemoveContainer" containerID="d390928f0527f63ee3f4620a2c79ecdc24956e722647d7509b2e1a6d44922213"
Sep 30 14:10:28 crc kubenswrapper[4783]: E0930 14:10:28.249571 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d390928f0527f63ee3f4620a2c79ecdc24956e722647d7509b2e1a6d44922213\": container with ID starting with d390928f0527f63ee3f4620a2c79ecdc24956e722647d7509b2e1a6d44922213 not found: ID does not exist" containerID="d390928f0527f63ee3f4620a2c79ecdc24956e722647d7509b2e1a6d44922213"
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.249612 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d390928f0527f63ee3f4620a2c79ecdc24956e722647d7509b2e1a6d44922213"} err="failed to get container status \"d390928f0527f63ee3f4620a2c79ecdc24956e722647d7509b2e1a6d44922213\": rpc error: code = NotFound desc = could not find container \"d390928f0527f63ee3f4620a2c79ecdc24956e722647d7509b2e1a6d44922213\": container with ID starting with d390928f0527f63ee3f4620a2c79ecdc24956e722647d7509b2e1a6d44922213 not found: ID does not exist"
Sep 30 14:10:28 crc kubenswrapper[4783]: I0930 14:10:28.861506 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" path="/var/lib/kubelet/pods/df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3/volumes"
Sep 30 14:10:37 crc kubenswrapper[4783]: I0930 14:10:37.674380 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 14:10:37 crc kubenswrapper[4783]: I0930 14:10:37.675069 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 14:10:37 crc kubenswrapper[4783]: I0930 14:10:37.675134 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf"
Sep 30 14:10:37 crc kubenswrapper[4783]: I0930 14:10:37.676122 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d556784dae98e8b58aa7de19fbab2ea6b36f26979cf5fba2f7252409875e121b"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 14:10:37 crc kubenswrapper[4783]: I0930 14:10:37.676255 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://d556784dae98e8b58aa7de19fbab2ea6b36f26979cf5fba2f7252409875e121b" gracePeriod=600
Sep 30 14:10:38 crc kubenswrapper[4783]: I0930 14:10:38.241816 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="d556784dae98e8b58aa7de19fbab2ea6b36f26979cf5fba2f7252409875e121b" exitCode=0
Sep 30 14:10:38 crc kubenswrapper[4783]: I0930 14:10:38.241923 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"d556784dae98e8b58aa7de19fbab2ea6b36f26979cf5fba2f7252409875e121b"}
Sep 30 14:10:38 crc kubenswrapper[4783]: I0930 14:10:38.242439 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa"}
Sep 30 14:10:38 crc kubenswrapper[4783]: I0930 14:10:38.242470 4783 scope.go:117] "RemoveContainer" containerID="10370240a9dd9659e96fea2356985a5c5d8dd1d952b46304845c48c5adfb4740"
Sep 30 14:12:37 crc kubenswrapper[4783]: I0930 14:12:37.674720 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 14:12:37 crc kubenswrapper[4783]: I0930 14:12:37.675375 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 14:12:50 crc kubenswrapper[4783]: I0930 14:12:50.909884 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nxn7m"]
Sep 30 14:12:50 crc kubenswrapper[4783]: E0930 14:12:50.910608 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" containerName="registry-server"
Sep 30 14:12:50 crc kubenswrapper[4783]: I0930 14:12:50.910619 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" containerName="registry-server"
Sep 30 14:12:50 crc kubenswrapper[4783]: E0930 14:12:50.910630 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" containerName="extract-utilities"
Sep 30 14:12:50 crc kubenswrapper[4783]: I0930 14:12:50.910637 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" containerName="extract-utilities"
Sep 30 14:12:50 crc kubenswrapper[4783]: E0930 14:12:50.910664 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" containerName="extract-content"
Sep 30 14:12:50 crc kubenswrapper[4783]: I0930 14:12:50.910670 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" containerName="extract-content"
Sep 30 14:12:50 crc kubenswrapper[4783]: I0930 14:12:50.910792 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="df8475aa-eb1a-4fad-b75d-82e8bf2d6ba3" containerName="registry-server"
Sep 30 14:12:50 crc kubenswrapper[4783]: I0930 14:12:50.912019 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:12:50 crc kubenswrapper[4783]: I0930 14:12:50.931388 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nxn7m"]
Sep 30 14:12:51 crc kubenswrapper[4783]: I0930 14:12:51.039332 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-utilities\") pod \"community-operators-nxn7m\" (UID: \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\") " pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:12:51 crc kubenswrapper[4783]: I0930 14:12:51.039425 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-catalog-content\") pod \"community-operators-nxn7m\" (UID: \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\") " pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:12:51 crc kubenswrapper[4783]: I0930 14:12:51.039508 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvzfx\" (UniqueName: \"kubernetes.io/projected/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-kube-api-access-fvzfx\") pod \"community-operators-nxn7m\" (UID: \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\") " pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:12:51 crc kubenswrapper[4783]: I0930 14:12:51.141112 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvzfx\" (UniqueName: \"kubernetes.io/projected/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-kube-api-access-fvzfx\") pod \"community-operators-nxn7m\" (UID: \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\") " pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:12:51 crc kubenswrapper[4783]: I0930 14:12:51.141201 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-utilities\") pod \"community-operators-nxn7m\" (UID: \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\") " pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:12:51 crc kubenswrapper[4783]: I0930 14:12:51.141238 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-catalog-content\") pod \"community-operators-nxn7m\" (UID: \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\") " pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:12:51 crc kubenswrapper[4783]: I0930 14:12:51.141824 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-catalog-content\") pod \"community-operators-nxn7m\" (UID: \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\") " pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:12:51 crc kubenswrapper[4783]: I0930 14:12:51.141935 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-utilities\") pod \"community-operators-nxn7m\" (UID: \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\") " pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:12:51 crc kubenswrapper[4783]: I0930 14:12:51.168344 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvzfx\" (UniqueName: \"kubernetes.io/projected/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-kube-api-access-fvzfx\") pod \"community-operators-nxn7m\" (UID: \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\") " pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:12:51 crc kubenswrapper[4783]: I0930 14:12:51.241673 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:12:51 crc kubenswrapper[4783]: I0930 14:12:51.532580 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nxn7m"]
Sep 30 14:12:52 crc kubenswrapper[4783]: I0930 14:12:52.351312 4783 generic.go:334] "Generic (PLEG): container finished" podID="5c47f9a9-7650-4052-bf4a-3ffb2e98255a" containerID="d93da1f246e3f8eaa55159c3df4f07e9cc36fea93e1fc2e307c9a469dace281e" exitCode=0
Sep 30 14:12:52 crc kubenswrapper[4783]: I0930 14:12:52.351488 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nxn7m" event={"ID":"5c47f9a9-7650-4052-bf4a-3ffb2e98255a","Type":"ContainerDied","Data":"d93da1f246e3f8eaa55159c3df4f07e9cc36fea93e1fc2e307c9a469dace281e"}
Sep 30 14:12:52 crc kubenswrapper[4783]: I0930 14:12:52.351700 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nxn7m" event={"ID":"5c47f9a9-7650-4052-bf4a-3ffb2e98255a","Type":"ContainerStarted","Data":"f91b6d53066a8e08f3204233b2142fb7e9e3bc12a3895652ec1505043c3bb63b"}
Sep 30 14:12:53 crc kubenswrapper[4783]: I0930 14:12:53.360943 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nxn7m" event={"ID":"5c47f9a9-7650-4052-bf4a-3ffb2e98255a","Type":"ContainerStarted","Data":"fcd56484f7a3c96b6e90b1c0dae1f221f5c904e1ae21788cf8dce9ea98e2a3cd"}
Sep 30 14:12:54 crc kubenswrapper[4783]: I0930 14:12:54.370078 4783 generic.go:334] "Generic (PLEG): container finished" podID="5c47f9a9-7650-4052-bf4a-3ffb2e98255a" containerID="fcd56484f7a3c96b6e90b1c0dae1f221f5c904e1ae21788cf8dce9ea98e2a3cd" exitCode=0
Sep 30 14:12:54 crc kubenswrapper[4783]: I0930 14:12:54.370114 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nxn7m" event={"ID":"5c47f9a9-7650-4052-bf4a-3ffb2e98255a","Type":"ContainerDied","Data":"fcd56484f7a3c96b6e90b1c0dae1f221f5c904e1ae21788cf8dce9ea98e2a3cd"}
Sep 30 14:12:55 crc kubenswrapper[4783]: I0930 14:12:55.379176 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nxn7m" event={"ID":"5c47f9a9-7650-4052-bf4a-3ffb2e98255a","Type":"ContainerStarted","Data":"5d652361454d73152562216901c91d4e33ecc5b4c7b899f90804c4d5a58e891e"}
Sep 30 14:12:55 crc kubenswrapper[4783]: I0930 14:12:55.398259 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nxn7m" podStartSLOduration=2.858785134 podStartE2EDuration="5.398241862s" podCreationTimestamp="2025-09-30 14:12:50 +0000 UTC" firstStartedPulling="2025-09-30 14:12:52.35323854 +0000 UTC m=+2272.284704847" lastFinishedPulling="2025-09-30 14:12:54.892695268 +0000 UTC m=+2274.824161575" observedRunningTime="2025-09-30 14:12:55.397621233 +0000 UTC m=+2275.329087550" watchObservedRunningTime="2025-09-30 14:12:55.398241862 +0000 UTC m=+2275.329708169"
Sep 30 14:13:01 crc kubenswrapper[4783]: I0930 14:13:01.242923 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:13:01 crc kubenswrapper[4783]: I0930 14:13:01.243355 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:13:01 crc kubenswrapper[4783]: I0930 14:13:01.281262 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:13:01 crc kubenswrapper[4783]: I0930 14:13:01.487876 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:13:01 crc kubenswrapper[4783]: I0930 14:13:01.528448 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nxn7m"]
Sep 30 14:13:03 crc kubenswrapper[4783]: I0930 14:13:03.438643 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nxn7m" podUID="5c47f9a9-7650-4052-bf4a-3ffb2e98255a" containerName="registry-server" containerID="cri-o://5d652361454d73152562216901c91d4e33ecc5b4c7b899f90804c4d5a58e891e" gracePeriod=2
Sep 30 14:13:03 crc kubenswrapper[4783]: I0930 14:13:03.802470 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:13:03 crc kubenswrapper[4783]: I0930 14:13:03.836026 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-catalog-content\") pod \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\" (UID: \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\") "
Sep 30 14:13:03 crc kubenswrapper[4783]: I0930 14:13:03.836209 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvzfx\" (UniqueName: \"kubernetes.io/projected/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-kube-api-access-fvzfx\") pod \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\" (UID: \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\") "
Sep 30 14:13:03 crc kubenswrapper[4783]: I0930 14:13:03.836277 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-utilities\") pod \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\" (UID: \"5c47f9a9-7650-4052-bf4a-3ffb2e98255a\") "
Sep 30 14:13:03 crc kubenswrapper[4783]: I0930 14:13:03.837834 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-utilities" (OuterVolumeSpecName: "utilities") pod "5c47f9a9-7650-4052-bf4a-3ffb2e98255a" (UID: "5c47f9a9-7650-4052-bf4a-3ffb2e98255a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 14:13:03 crc kubenswrapper[4783]: I0930 14:13:03.843622 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-kube-api-access-fvzfx" (OuterVolumeSpecName: "kube-api-access-fvzfx") pod "5c47f9a9-7650-4052-bf4a-3ffb2e98255a" (UID: "5c47f9a9-7650-4052-bf4a-3ffb2e98255a"). InnerVolumeSpecName "kube-api-access-fvzfx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 14:13:03 crc kubenswrapper[4783]: I0930 14:13:03.887603 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5c47f9a9-7650-4052-bf4a-3ffb2e98255a" (UID: "5c47f9a9-7650-4052-bf4a-3ffb2e98255a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 14:13:03 crc kubenswrapper[4783]: I0930 14:13:03.938153 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvzfx\" (UniqueName: \"kubernetes.io/projected/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-kube-api-access-fvzfx\") on node \"crc\" DevicePath \"\""
Sep 30 14:13:03 crc kubenswrapper[4783]: I0930 14:13:03.938506 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 14:13:03 crc kubenswrapper[4783]: I0930 14:13:03.938591 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c47f9a9-7650-4052-bf4a-3ffb2e98255a-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.451543 4783 generic.go:334] "Generic (PLEG): container finished" podID="5c47f9a9-7650-4052-bf4a-3ffb2e98255a" containerID="5d652361454d73152562216901c91d4e33ecc5b4c7b899f90804c4d5a58e891e" exitCode=0
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.451608 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nxn7m" event={"ID":"5c47f9a9-7650-4052-bf4a-3ffb2e98255a","Type":"ContainerDied","Data":"5d652361454d73152562216901c91d4e33ecc5b4c7b899f90804c4d5a58e891e"}
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.451669 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nxn7m" event={"ID":"5c47f9a9-7650-4052-bf4a-3ffb2e98255a","Type":"ContainerDied","Data":"f91b6d53066a8e08f3204233b2142fb7e9e3bc12a3895652ec1505043c3bb63b"}
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.451688 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nxn7m"
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.451701 4783 scope.go:117] "RemoveContainer" containerID="5d652361454d73152562216901c91d4e33ecc5b4c7b899f90804c4d5a58e891e"
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.483818 4783 scope.go:117] "RemoveContainer" containerID="fcd56484f7a3c96b6e90b1c0dae1f221f5c904e1ae21788cf8dce9ea98e2a3cd"
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.501989 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nxn7m"]
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.508286 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nxn7m"]
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.508581 4783 scope.go:117] "RemoveContainer" containerID="d93da1f246e3f8eaa55159c3df4f07e9cc36fea93e1fc2e307c9a469dace281e"
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.550100 4783 scope.go:117] "RemoveContainer" containerID="5d652361454d73152562216901c91d4e33ecc5b4c7b899f90804c4d5a58e891e"
Sep 30 14:13:04 crc kubenswrapper[4783]: E0930 14:13:04.550730 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d652361454d73152562216901c91d4e33ecc5b4c7b899f90804c4d5a58e891e\": container with ID starting with 5d652361454d73152562216901c91d4e33ecc5b4c7b899f90804c4d5a58e891e not found: ID does not exist" containerID="5d652361454d73152562216901c91d4e33ecc5b4c7b899f90804c4d5a58e891e"
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.550833 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d652361454d73152562216901c91d4e33ecc5b4c7b899f90804c4d5a58e891e"} err="failed to get container status \"5d652361454d73152562216901c91d4e33ecc5b4c7b899f90804c4d5a58e891e\": rpc error: code = NotFound desc = could not find container \"5d652361454d73152562216901c91d4e33ecc5b4c7b899f90804c4d5a58e891e\": container with ID starting with 5d652361454d73152562216901c91d4e33ecc5b4c7b899f90804c4d5a58e891e not found: ID does not exist"
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.550893 4783 scope.go:117] "RemoveContainer" containerID="fcd56484f7a3c96b6e90b1c0dae1f221f5c904e1ae21788cf8dce9ea98e2a3cd"
Sep 30 14:13:04 crc kubenswrapper[4783]: E0930 14:13:04.551514 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcd56484f7a3c96b6e90b1c0dae1f221f5c904e1ae21788cf8dce9ea98e2a3cd\": container with ID starting with fcd56484f7a3c96b6e90b1c0dae1f221f5c904e1ae21788cf8dce9ea98e2a3cd not found: ID does not exist" containerID="fcd56484f7a3c96b6e90b1c0dae1f221f5c904e1ae21788cf8dce9ea98e2a3cd"
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.551561 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcd56484f7a3c96b6e90b1c0dae1f221f5c904e1ae21788cf8dce9ea98e2a3cd"} err="failed to get container status \"fcd56484f7a3c96b6e90b1c0dae1f221f5c904e1ae21788cf8dce9ea98e2a3cd\": rpc error: code = NotFound desc = could not find container \"fcd56484f7a3c96b6e90b1c0dae1f221f5c904e1ae21788cf8dce9ea98e2a3cd\": container with ID starting with fcd56484f7a3c96b6e90b1c0dae1f221f5c904e1ae21788cf8dce9ea98e2a3cd not found: ID does not exist"
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.551603 4783 scope.go:117] "RemoveContainer" containerID="d93da1f246e3f8eaa55159c3df4f07e9cc36fea93e1fc2e307c9a469dace281e"
Sep 30 14:13:04 crc kubenswrapper[4783]: E0930 14:13:04.551899 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d93da1f246e3f8eaa55159c3df4f07e9cc36fea93e1fc2e307c9a469dace281e\": container with ID starting with d93da1f246e3f8eaa55159c3df4f07e9cc36fea93e1fc2e307c9a469dace281e not found: ID does not exist" containerID="d93da1f246e3f8eaa55159c3df4f07e9cc36fea93e1fc2e307c9a469dace281e"
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.551934 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d93da1f246e3f8eaa55159c3df4f07e9cc36fea93e1fc2e307c9a469dace281e"} err="failed to get container status \"d93da1f246e3f8eaa55159c3df4f07e9cc36fea93e1fc2e307c9a469dace281e\": rpc error: code = NotFound desc = could not find container \"d93da1f246e3f8eaa55159c3df4f07e9cc36fea93e1fc2e307c9a469dace281e\": container with ID starting with d93da1f246e3f8eaa55159c3df4f07e9cc36fea93e1fc2e307c9a469dace281e not found: ID does not exist"
Sep 30 14:13:04 crc kubenswrapper[4783]: I0930 14:13:04.851477 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c47f9a9-7650-4052-bf4a-3ffb2e98255a" path="/var/lib/kubelet/pods/5c47f9a9-7650-4052-bf4a-3ffb2e98255a/volumes"
Sep 30 14:13:07 crc kubenswrapper[4783]: I0930 14:13:07.674122 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 14:13:07 crc kubenswrapper[4783]: I0930 14:13:07.674554 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 14:13:37 crc kubenswrapper[4783]: I0930 14:13:37.674392 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 14:13:37 crc kubenswrapper[4783]: I0930 14:13:37.675549 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 14:13:37 crc kubenswrapper[4783]: I0930 14:13:37.675620 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf"
Sep 30 14:13:37 crc kubenswrapper[4783]: I0930 14:13:37.676428 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 14:13:37 crc kubenswrapper[4783]: I0930 14:13:37.676500 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" gracePeriod=600
Sep 30 14:13:37 crc kubenswrapper[4783]: E0930 14:13:37.805745 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb"
Sep 30 14:13:38 crc kubenswrapper[4783]: I0930 14:13:38.730370 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" exitCode=0
Sep 30 14:13:38 crc kubenswrapper[4783]: I0930 14:13:38.730411 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa"}
Sep 30 14:13:38 crc kubenswrapper[4783]: I0930 14:13:38.730740 4783 scope.go:117] "RemoveContainer" containerID="d556784dae98e8b58aa7de19fbab2ea6b36f26979cf5fba2f7252409875e121b"
Sep 30 14:13:38 crc kubenswrapper[4783]: I0930 14:13:38.731527 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa"
Sep 30 14:13:38 crc kubenswrapper[4783]: E0930 14:13:38.732104 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb"
Sep 30 14:13:38 crc kubenswrapper[4783]: I0930 14:13:38.957206 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4cz66"]
Sep 30 14:13:38 crc kubenswrapper[4783]: E0930 14:13:38.957866 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c47f9a9-7650-4052-bf4a-3ffb2e98255a" containerName="extract-utilities"
Sep 30 14:13:38 crc kubenswrapper[4783]: I0930 14:13:38.957911 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c47f9a9-7650-4052-bf4a-3ffb2e98255a" containerName="extract-utilities"
Sep 30 14:13:38 crc kubenswrapper[4783]: E0930 14:13:38.957979 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c47f9a9-7650-4052-bf4a-3ffb2e98255a" containerName="extract-content"
Sep 30 14:13:38 crc kubenswrapper[4783]: I0930 14:13:38.957998 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c47f9a9-7650-4052-bf4a-3ffb2e98255a" containerName="extract-content"
Sep 30 14:13:38 crc kubenswrapper[4783]: E0930 14:13:38.958017 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c47f9a9-7650-4052-bf4a-3ffb2e98255a" containerName="registry-server"
Sep 30 14:13:38 crc kubenswrapper[4783]: I0930 14:13:38.958035 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c47f9a9-7650-4052-bf4a-3ffb2e98255a" containerName="registry-server"
Sep 30 14:13:38 crc kubenswrapper[4783]: I0930 14:13:38.958416 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c47f9a9-7650-4052-bf4a-3ffb2e98255a" containerName="registry-server"
Sep 30 14:13:38 crc kubenswrapper[4783]: I0930 14:13:38.960495 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:38 crc kubenswrapper[4783]: I0930 14:13:38.964469 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4cz66"]
Sep 30 14:13:39 crc kubenswrapper[4783]: I0930 14:13:39.133368 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5k7w9\" (UniqueName: \"kubernetes.io/projected/e8c876aa-591e-480e-8ff8-6da20be60494-kube-api-access-5k7w9\") pod \"redhat-marketplace-4cz66\" (UID: \"e8c876aa-591e-480e-8ff8-6da20be60494\") " pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:39 crc kubenswrapper[4783]: I0930 14:13:39.133491 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8c876aa-591e-480e-8ff8-6da20be60494-catalog-content\") pod \"redhat-marketplace-4cz66\" (UID: \"e8c876aa-591e-480e-8ff8-6da20be60494\") " pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:39 crc kubenswrapper[4783]: I0930 14:13:39.133680 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8c876aa-591e-480e-8ff8-6da20be60494-utilities\") pod \"redhat-marketplace-4cz66\" (UID: \"e8c876aa-591e-480e-8ff8-6da20be60494\") " pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:39 crc kubenswrapper[4783]: I0930 14:13:39.235464 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8c876aa-591e-480e-8ff8-6da20be60494-utilities\") pod \"redhat-marketplace-4cz66\" (UID: \"e8c876aa-591e-480e-8ff8-6da20be60494\") " pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:39 crc kubenswrapper[4783]: I0930 14:13:39.235529 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5k7w9\" (UniqueName: \"kubernetes.io/projected/e8c876aa-591e-480e-8ff8-6da20be60494-kube-api-access-5k7w9\") pod \"redhat-marketplace-4cz66\" (UID: \"e8c876aa-591e-480e-8ff8-6da20be60494\") " pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:39 crc kubenswrapper[4783]: I0930 14:13:39.235567 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8c876aa-591e-480e-8ff8-6da20be60494-catalog-content\") pod \"redhat-marketplace-4cz66\" (UID: \"e8c876aa-591e-480e-8ff8-6da20be60494\") " pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:39 crc kubenswrapper[4783]: I0930 14:13:39.236042 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8c876aa-591e-480e-8ff8-6da20be60494-utilities\") pod \"redhat-marketplace-4cz66\" (UID: \"e8c876aa-591e-480e-8ff8-6da20be60494\") " pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:39 crc kubenswrapper[4783]: I0930 14:13:39.236166 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8c876aa-591e-480e-8ff8-6da20be60494-catalog-content\") pod \"redhat-marketplace-4cz66\" (UID: \"e8c876aa-591e-480e-8ff8-6da20be60494\") " pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:39 crc kubenswrapper[4783]: I0930 14:13:39.258850 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5k7w9\" (UniqueName: \"kubernetes.io/projected/e8c876aa-591e-480e-8ff8-6da20be60494-kube-api-access-5k7w9\") pod \"redhat-marketplace-4cz66\" (UID: \"e8c876aa-591e-480e-8ff8-6da20be60494\") " pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:39 crc kubenswrapper[4783]: I0930 14:13:39.289771 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:39 crc kubenswrapper[4783]: I0930 14:13:39.713786 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4cz66"]
Sep 30 14:13:39 crc kubenswrapper[4783]: I0930 14:13:39.738094 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4cz66" event={"ID":"e8c876aa-591e-480e-8ff8-6da20be60494","Type":"ContainerStarted","Data":"8da77b16e2527d827aee84e329cb2030e3b5223b58d20a5d14e84e0ec84cf67e"}
Sep 30 14:13:40 crc kubenswrapper[4783]: I0930 14:13:40.753267 4783 generic.go:334] "Generic (PLEG): container finished" podID="e8c876aa-591e-480e-8ff8-6da20be60494" containerID="7c8577286053fef1760ff40c733859d4a880f4cc07bd98e17f9a2140ebb9b2c1" exitCode=0
Sep 30 14:13:40 crc kubenswrapper[4783]: I0930 14:13:40.753572 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4cz66" event={"ID":"e8c876aa-591e-480e-8ff8-6da20be60494","Type":"ContainerDied","Data":"7c8577286053fef1760ff40c733859d4a880f4cc07bd98e17f9a2140ebb9b2c1"}
Sep 30 14:13:40 crc kubenswrapper[4783]: I0930 14:13:40.755649 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 14:13:42 crc kubenswrapper[4783]: I0930 14:13:42.771421 4783 generic.go:334] "Generic (PLEG): container finished" podID="e8c876aa-591e-480e-8ff8-6da20be60494" containerID="f68d343e9c1c5f1375b7ebed35555a559efb03fdbb5bb85f91b9eb0947f8feb5" exitCode=0
Sep 30 14:13:42 crc kubenswrapper[4783]: I0930 14:13:42.772304 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4cz66" event={"ID":"e8c876aa-591e-480e-8ff8-6da20be60494","Type":"ContainerDied","Data":"f68d343e9c1c5f1375b7ebed35555a559efb03fdbb5bb85f91b9eb0947f8feb5"}
Sep 30 14:13:45 crc kubenswrapper[4783]: I0930 14:13:45.799668 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4cz66" event={"ID":"e8c876aa-591e-480e-8ff8-6da20be60494","Type":"ContainerStarted","Data":"7c51f69167ae9654b6416affc87a481cf89cb6decf7444258a1165a11b87350f"}
Sep 30 14:13:45 crc kubenswrapper[4783]: I0930 14:13:45.821778 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4cz66" podStartSLOduration=4.088474461 podStartE2EDuration="7.821754139s" podCreationTimestamp="2025-09-30 14:13:38 +0000 UTC" firstStartedPulling="2025-09-30 14:13:40.755452819 +0000 UTC m=+2320.686919126" lastFinishedPulling="2025-09-30 14:13:44.488732497 +0000 UTC m=+2324.420198804" observedRunningTime="2025-09-30 14:13:45.819056662 +0000 UTC m=+2325.750522979" watchObservedRunningTime="2025-09-30 14:13:45.821754139 +0000 UTC m=+2325.753220446"
Sep 30 14:13:49 crc kubenswrapper[4783]: I0930 14:13:49.290877 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:49 crc kubenswrapper[4783]: I0930 14:13:49.291364 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:49 crc kubenswrapper[4783]: I0930 14:13:49.330723 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:49 crc kubenswrapper[4783]: I0930 14:13:49.843312 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa"
Sep 30 14:13:49 crc kubenswrapper[4783]: E0930 14:13:49.843695 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb"
Sep 30 14:13:59 crc kubenswrapper[4783]: I0930 14:13:59.329975 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:13:59 crc kubenswrapper[4783]: I0930 14:13:59.381749 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4cz66"]
Sep 30 14:13:59 crc kubenswrapper[4783]: I0930 14:13:59.908274 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4cz66" podUID="e8c876aa-591e-480e-8ff8-6da20be60494" containerName="registry-server" containerID="cri-o://7c51f69167ae9654b6416affc87a481cf89cb6decf7444258a1165a11b87350f" gracePeriod=2
Sep 30 14:14:00 crc kubenswrapper[4783]: I0930 14:14:00.872694 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:14:00 crc kubenswrapper[4783]: I0930 14:14:00.921717 4783 generic.go:334] "Generic (PLEG): container finished" podID="e8c876aa-591e-480e-8ff8-6da20be60494" containerID="7c51f69167ae9654b6416affc87a481cf89cb6decf7444258a1165a11b87350f" exitCode=0
Sep 30 14:14:00 crc kubenswrapper[4783]: I0930 14:14:00.921757 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4cz66" event={"ID":"e8c876aa-591e-480e-8ff8-6da20be60494","Type":"ContainerDied","Data":"7c51f69167ae9654b6416affc87a481cf89cb6decf7444258a1165a11b87350f"}
Sep 30 14:14:00 crc kubenswrapper[4783]: I0930 14:14:00.921783 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4cz66" event={"ID":"e8c876aa-591e-480e-8ff8-6da20be60494","Type":"ContainerDied","Data":"8da77b16e2527d827aee84e329cb2030e3b5223b58d20a5d14e84e0ec84cf67e"}
Sep 30 14:14:00 crc kubenswrapper[4783]: I0930 14:14:00.921800 4783 scope.go:117] "RemoveContainer" containerID="7c51f69167ae9654b6416affc87a481cf89cb6decf7444258a1165a11b87350f"
Sep 30 14:14:00 crc kubenswrapper[4783]: I0930 14:14:00.921798 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4cz66"
Sep 30 14:14:00 crc kubenswrapper[4783]: I0930 14:14:00.951800 4783 scope.go:117] "RemoveContainer" containerID="f68d343e9c1c5f1375b7ebed35555a559efb03fdbb5bb85f91b9eb0947f8feb5"
Sep 30 14:14:00 crc kubenswrapper[4783]: I0930 14:14:00.971677 4783 scope.go:117] "RemoveContainer" containerID="7c8577286053fef1760ff40c733859d4a880f4cc07bd98e17f9a2140ebb9b2c1"
Sep 30 14:14:00 crc kubenswrapper[4783]: I0930 14:14:00.996738 4783 scope.go:117] "RemoveContainer" containerID="7c51f69167ae9654b6416affc87a481cf89cb6decf7444258a1165a11b87350f"
Sep 30 14:14:00 crc kubenswrapper[4783]: E0930 14:14:00.997332 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c51f69167ae9654b6416affc87a481cf89cb6decf7444258a1165a11b87350f\": container with ID starting with 7c51f69167ae9654b6416affc87a481cf89cb6decf7444258a1165a11b87350f not found: ID does not exist" containerID="7c51f69167ae9654b6416affc87a481cf89cb6decf7444258a1165a11b87350f"
Sep 30 14:14:00 crc kubenswrapper[4783]: I0930 14:14:00.997382 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c51f69167ae9654b6416affc87a481cf89cb6decf7444258a1165a11b87350f"} err="failed to get container status \"7c51f69167ae9654b6416affc87a481cf89cb6decf7444258a1165a11b87350f\": rpc error: code = NotFound desc = could not find container \"7c51f69167ae9654b6416affc87a481cf89cb6decf7444258a1165a11b87350f\": container with ID starting with 7c51f69167ae9654b6416affc87a481cf89cb6decf7444258a1165a11b87350f not found: ID does not exist"
Sep 30 14:14:00 crc kubenswrapper[4783]: I0930 14:14:00.997421 4783 scope.go:117] "RemoveContainer" containerID="f68d343e9c1c5f1375b7ebed35555a559efb03fdbb5bb85f91b9eb0947f8feb5"
Sep 30 14:14:00 crc kubenswrapper[4783]: E0930 14:14:00.998067 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f68d343e9c1c5f1375b7ebed35555a559efb03fdbb5bb85f91b9eb0947f8feb5\": container with ID starting with f68d343e9c1c5f1375b7ebed35555a559efb03fdbb5bb85f91b9eb0947f8feb5 not found: ID does not exist" containerID="f68d343e9c1c5f1375b7ebed35555a559efb03fdbb5bb85f91b9eb0947f8feb5"
Sep 30 14:14:00 crc kubenswrapper[4783]: I0930 14:14:00.998128 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f68d343e9c1c5f1375b7ebed35555a559efb03fdbb5bb85f91b9eb0947f8feb5"} err="failed to get container status \"f68d343e9c1c5f1375b7ebed35555a559efb03fdbb5bb85f91b9eb0947f8feb5\": rpc error: code = NotFound desc = could not find container \"f68d343e9c1c5f1375b7ebed35555a559efb03fdbb5bb85f91b9eb0947f8feb5\": container with ID starting with f68d343e9c1c5f1375b7ebed35555a559efb03fdbb5bb85f91b9eb0947f8feb5 not found: ID does not exist"
Sep 30 14:14:00 crc kubenswrapper[4783]: I0930 14:14:00.998156 4783 scope.go:117] "RemoveContainer" containerID="7c8577286053fef1760ff40c733859d4a880f4cc07bd98e17f9a2140ebb9b2c1"
Sep 30 14:14:00 crc kubenswrapper[4783]: E0930 14:14:00.998547 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c8577286053fef1760ff40c733859d4a880f4cc07bd98e17f9a2140ebb9b2c1\": container with ID starting with 7c8577286053fef1760ff40c733859d4a880f4cc07bd98e17f9a2140ebb9b2c1 not found: ID does not exist" containerID="7c8577286053fef1760ff40c733859d4a880f4cc07bd98e17f9a2140ebb9b2c1"
Sep 30 14:14:00 crc kubenswrapper[4783]: I0930 14:14:00.998599 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c8577286053fef1760ff40c733859d4a880f4cc07bd98e17f9a2140ebb9b2c1"} err="failed to get container status \"7c8577286053fef1760ff40c733859d4a880f4cc07bd98e17f9a2140ebb9b2c1\": rpc error: code = NotFound desc = could not find container \"7c8577286053fef1760ff40c733859d4a880f4cc07bd98e17f9a2140ebb9b2c1\": container with ID starting with 7c8577286053fef1760ff40c733859d4a880f4cc07bd98e17f9a2140ebb9b2c1 not found: ID does not exist"
Sep 30 14:14:01 crc kubenswrapper[4783]: I0930 14:14:01.059525 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5k7w9\" (UniqueName: \"kubernetes.io/projected/e8c876aa-591e-480e-8ff8-6da20be60494-kube-api-access-5k7w9\") pod \"e8c876aa-591e-480e-8ff8-6da20be60494\" (UID: \"e8c876aa-591e-480e-8ff8-6da20be60494\") "
Sep 30 14:14:01 crc kubenswrapper[4783]: I0930 14:14:01.059583 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8c876aa-591e-480e-8ff8-6da20be60494-catalog-content\") pod \"e8c876aa-591e-480e-8ff8-6da20be60494\" (UID: \"e8c876aa-591e-480e-8ff8-6da20be60494\") "
Sep 30 14:14:01 crc kubenswrapper[4783]: I0930 14:14:01.059641 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8c876aa-591e-480e-8ff8-6da20be60494-utilities\") pod \"e8c876aa-591e-480e-8ff8-6da20be60494\" (UID: \"e8c876aa-591e-480e-8ff8-6da20be60494\") "
Sep 30 14:14:01 crc kubenswrapper[4783]: I0930 14:14:01.060832 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8c876aa-591e-480e-8ff8-6da20be60494-utilities" (OuterVolumeSpecName: "utilities") pod "e8c876aa-591e-480e-8ff8-6da20be60494" (UID: "e8c876aa-591e-480e-8ff8-6da20be60494"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 14:14:01 crc kubenswrapper[4783]: I0930 14:14:01.064960 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8c876aa-591e-480e-8ff8-6da20be60494-kube-api-access-5k7w9" (OuterVolumeSpecName: "kube-api-access-5k7w9") pod "e8c876aa-591e-480e-8ff8-6da20be60494" (UID: "e8c876aa-591e-480e-8ff8-6da20be60494"). InnerVolumeSpecName "kube-api-access-5k7w9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 14:14:01 crc kubenswrapper[4783]: I0930 14:14:01.072780 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8c876aa-591e-480e-8ff8-6da20be60494-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e8c876aa-591e-480e-8ff8-6da20be60494" (UID: "e8c876aa-591e-480e-8ff8-6da20be60494"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 14:14:01 crc kubenswrapper[4783]: I0930 14:14:01.161911 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8c876aa-591e-480e-8ff8-6da20be60494-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 14:14:01 crc kubenswrapper[4783]: I0930 14:14:01.161977 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5k7w9\" (UniqueName: \"kubernetes.io/projected/e8c876aa-591e-480e-8ff8-6da20be60494-kube-api-access-5k7w9\") on node \"crc\" DevicePath \"\""
Sep 30 14:14:01 crc kubenswrapper[4783]: I0930 14:14:01.161993 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8c876aa-591e-480e-8ff8-6da20be60494-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 14:14:01 crc kubenswrapper[4783]: I0930 14:14:01.257821 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4cz66"]
Sep 30 14:14:01 crc kubenswrapper[4783]: I0930 14:14:01.266860 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4cz66"]
Sep 30 14:14:02 crc kubenswrapper[4783]: I0930 14:14:02.851296 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8c876aa-591e-480e-8ff8-6da20be60494" path="/var/lib/kubelet/pods/e8c876aa-591e-480e-8ff8-6da20be60494/volumes"
Sep 30 14:14:03 crc kubenswrapper[4783]: I0930 14:14:03.843215 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa"
Sep 30 14:14:03 crc kubenswrapper[4783]: E0930 14:14:03.843558 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb"
Sep 30 14:14:17 crc kubenswrapper[4783]: I0930 14:14:17.843682 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa"
Sep 30 14:14:17 crc kubenswrapper[4783]: E0930 14:14:17.844532 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb"
Sep 30 14:14:28 crc kubenswrapper[4783]: I0930 14:14:28.843053 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa"
Sep 30 14:14:28 crc kubenswrapper[4783]: E0930 14:14:28.843667 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb"
Sep 30 14:14:41 crc kubenswrapper[4783]: I0930 14:14:41.843202 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa"
Sep 30 14:14:41 crc kubenswrapper[4783]: E0930 14:14:41.844290 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb"
Sep 30 14:14:54 crc kubenswrapper[4783]: I0930 14:14:54.843614 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa"
Sep 30 14:14:55 crc kubenswrapper[4783]: E0930 14:14:54.844758 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb"
Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.143935 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr"]
Sep 30 14:15:00 crc kubenswrapper[4783]: E0930 14:15:00.144609 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8c876aa-591e-480e-8ff8-6da20be60494" containerName="extract-utilities"
Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.144627 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8c876aa-591e-480e-8ff8-6da20be60494" containerName="extract-utilities"
Sep 30 14:15:00 crc kubenswrapper[4783]: E0930 14:15:00.144650 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8c876aa-591e-480e-8ff8-6da20be60494" containerName="registry-server"
Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.144656 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8c876aa-591e-480e-8ff8-6da20be60494" containerName="registry-server"
Sep 30 14:15:00 crc kubenswrapper[4783]: E0930 14:15:00.144668 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8c876aa-591e-480e-8ff8-6da20be60494" containerName="extract-content"
Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.144674 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8c876aa-591e-480e-8ff8-6da20be60494" containerName="extract-content"
Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.144835 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8c876aa-591e-480e-8ff8-6da20be60494" containerName="registry-server"
Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.145350 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.149002 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.152185 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.157442 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr"] Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.285043 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqsfh\" (UniqueName: \"kubernetes.io/projected/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-kube-api-access-fqsfh\") pod \"collect-profiles-29320695-ghknr\" (UID: \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.285105 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-secret-volume\") pod \"collect-profiles-29320695-ghknr\" (UID: \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.285170 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-config-volume\") pod \"collect-profiles-29320695-ghknr\" (UID: \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.386800 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqsfh\" (UniqueName: \"kubernetes.io/projected/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-kube-api-access-fqsfh\") pod \"collect-profiles-29320695-ghknr\" (UID: \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.386960 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-secret-volume\") pod \"collect-profiles-29320695-ghknr\" (UID: \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.387069 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-config-volume\") pod \"collect-profiles-29320695-ghknr\" (UID: \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.388482 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-config-volume\") pod 
\"collect-profiles-29320695-ghknr\" (UID: \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.394750 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-secret-volume\") pod \"collect-profiles-29320695-ghknr\" (UID: \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.407135 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqsfh\" (UniqueName: \"kubernetes.io/projected/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-kube-api-access-fqsfh\") pod \"collect-profiles-29320695-ghknr\" (UID: \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.475460 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" Sep 30 14:15:00 crc kubenswrapper[4783]: I0930 14:15:00.887703 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr"] Sep 30 14:15:01 crc kubenswrapper[4783]: I0930 14:15:01.365660 4783 generic.go:334] "Generic (PLEG): container finished" podID="4a311fff-dc1c-46a7-9e74-ee4c03b630b2" containerID="997d648a041d3101eed6b0cfe6096fad2ee17ee6880115ad6af411abea50eebe" exitCode=0 Sep 30 14:15:01 crc kubenswrapper[4783]: I0930 14:15:01.365749 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" event={"ID":"4a311fff-dc1c-46a7-9e74-ee4c03b630b2","Type":"ContainerDied","Data":"997d648a041d3101eed6b0cfe6096fad2ee17ee6880115ad6af411abea50eebe"} Sep 30 14:15:01 crc kubenswrapper[4783]: I0930 14:15:01.365980 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" event={"ID":"4a311fff-dc1c-46a7-9e74-ee4c03b630b2","Type":"ContainerStarted","Data":"10e52750badaa61c4827aa8cf3cfd3d7a1f2e9f6884bc72eb6508ab2a7941775"} Sep 30 14:15:02 crc kubenswrapper[4783]: I0930 14:15:02.645718 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" Sep 30 14:15:02 crc kubenswrapper[4783]: I0930 14:15:02.822850 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsfh\" (UniqueName: \"kubernetes.io/projected/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-kube-api-access-fqsfh\") pod \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\" (UID: \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\") " Sep 30 14:15:02 crc kubenswrapper[4783]: I0930 14:15:02.823935 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-secret-volume\") pod \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\" (UID: \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\") " Sep 30 14:15:02 crc kubenswrapper[4783]: I0930 14:15:02.824093 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-config-volume\") pod \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\" (UID: \"4a311fff-dc1c-46a7-9e74-ee4c03b630b2\") " Sep 30 14:15:02 crc kubenswrapper[4783]: I0930 14:15:02.824630 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-config-volume" (OuterVolumeSpecName: "config-volume") pod "4a311fff-dc1c-46a7-9e74-ee4c03b630b2" (UID: "4a311fff-dc1c-46a7-9e74-ee4c03b630b2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:15:02 crc kubenswrapper[4783]: I0930 14:15:02.829472 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4a311fff-dc1c-46a7-9e74-ee4c03b630b2" (UID: "4a311fff-dc1c-46a7-9e74-ee4c03b630b2"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 14:15:02 crc kubenswrapper[4783]: I0930 14:15:02.829709 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-kube-api-access-fqsfh" (OuterVolumeSpecName: "kube-api-access-fqsfh") pod "4a311fff-dc1c-46a7-9e74-ee4c03b630b2" (UID: "4a311fff-dc1c-46a7-9e74-ee4c03b630b2"). InnerVolumeSpecName "kube-api-access-fqsfh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:15:02 crc kubenswrapper[4783]: I0930 14:15:02.924925 4783 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 14:15:02 crc kubenswrapper[4783]: I0930 14:15:02.924986 4783 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 14:15:02 crc kubenswrapper[4783]: I0930 14:15:02.925014 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsfh\" (UniqueName: \"kubernetes.io/projected/4a311fff-dc1c-46a7-9e74-ee4c03b630b2-kube-api-access-fqsfh\") on node \"crc\" DevicePath \"\"" Sep 30 14:15:03 crc kubenswrapper[4783]: I0930 14:15:03.384856 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" Sep 30 14:15:03 crc kubenswrapper[4783]: I0930 14:15:03.385294 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr" event={"ID":"4a311fff-dc1c-46a7-9e74-ee4c03b630b2","Type":"ContainerDied","Data":"10e52750badaa61c4827aa8cf3cfd3d7a1f2e9f6884bc72eb6508ab2a7941775"} Sep 30 14:15:03 crc kubenswrapper[4783]: I0930 14:15:03.385351 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10e52750badaa61c4827aa8cf3cfd3d7a1f2e9f6884bc72eb6508ab2a7941775" Sep 30 14:15:03 crc kubenswrapper[4783]: I0930 14:15:03.709108 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42"] Sep 30 14:15:03 crc kubenswrapper[4783]: I0930 14:15:03.714188 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320650-9kh42"] Sep 30 14:15:04 crc kubenswrapper[4783]: I0930 14:15:04.857671 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6af2276a-3ae6-4c19-b75c-935d765d3890" path="/var/lib/kubelet/pods/6af2276a-3ae6-4c19-b75c-935d765d3890/volumes" Sep 30 14:15:08 crc kubenswrapper[4783]: I0930 14:15:08.843657 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:15:08 crc kubenswrapper[4783]: E0930 14:15:08.843930 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:15:17 crc kubenswrapper[4783]: I0930 14:15:17.625800 4783 scope.go:117] "RemoveContainer" containerID="af7a544648adb8b681260694207537bd4b9123dbb3afae22df9becc0d3095939" Sep 30 14:15:23 crc kubenswrapper[4783]: I0930 14:15:23.843672 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:15:23 crc kubenswrapper[4783]: E0930 14:15:23.844439 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:15:36 crc kubenswrapper[4783]: I0930 14:15:36.843678 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:15:36 crc kubenswrapper[4783]: E0930 14:15:36.844659 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:15:47 crc kubenswrapper[4783]: I0930 
14:15:47.843577 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:15:47 crc kubenswrapper[4783]: E0930 14:15:47.844749 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:16:02 crc kubenswrapper[4783]: I0930 14:16:02.843382 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:16:02 crc kubenswrapper[4783]: E0930 14:16:02.844103 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:16:15 crc kubenswrapper[4783]: I0930 14:16:15.843246 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:16:15 crc kubenswrapper[4783]: E0930 14:16:15.843996 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:16:26 crc kubenswrapper[4783]: I0930 14:16:26.843928 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:16:26 crc kubenswrapper[4783]: E0930 14:16:26.844842 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:16:37 crc kubenswrapper[4783]: I0930 14:16:37.842945 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:16:37 crc kubenswrapper[4783]: E0930 14:16:37.843990 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:16:50 crc kubenswrapper[4783]: I0930 14:16:50.847288 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:16:50 crc kubenswrapper[4783]: E0930 14:16:50.848068 
4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:17:01 crc kubenswrapper[4783]: I0930 14:17:01.844412 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:17:01 crc kubenswrapper[4783]: E0930 14:17:01.850850 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:17:15 crc kubenswrapper[4783]: I0930 14:17:15.843093 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:17:15 crc kubenswrapper[4783]: E0930 14:17:15.844139 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:17:29 crc kubenswrapper[4783]: I0930 14:17:29.843917 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:17:29 crc kubenswrapper[4783]: E0930 14:17:29.845169 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:17:42 crc kubenswrapper[4783]: I0930 14:17:42.843861 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:17:42 crc kubenswrapper[4783]: E0930 14:17:42.845030 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:17:54 crc kubenswrapper[4783]: I0930 14:17:54.847528 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:17:54 crc kubenswrapper[4783]: E0930 14:17:54.848097 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:18:08 crc kubenswrapper[4783]: I0930 14:18:08.844014 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:18:08 crc kubenswrapper[4783]: E0930 14:18:08.844778 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:18:22 crc kubenswrapper[4783]: I0930 14:18:22.842891 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:18:22 crc kubenswrapper[4783]: E0930 14:18:22.843572 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:18:34 crc kubenswrapper[4783]: I0930 14:18:34.842918 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:18:34 crc kubenswrapper[4783]: E0930 14:18:34.843583 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:18:47 crc kubenswrapper[4783]: I0930 14:18:47.843591 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:18:49 crc kubenswrapper[4783]: I0930 14:18:49.016149 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"ce7050e6973576d4e40f4e8bad410802b76630552004372d2c16c0e7cf2d12ab"} Sep 30 14:19:10 crc kubenswrapper[4783]: I0930 14:19:10.757187 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9jkbq"] Sep 30 14:19:10 crc kubenswrapper[4783]: E0930 14:19:10.757923 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a311fff-dc1c-46a7-9e74-ee4c03b630b2" containerName="collect-profiles" Sep 30 14:19:10 crc kubenswrapper[4783]: I0930 14:19:10.757936 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a311fff-dc1c-46a7-9e74-ee4c03b630b2" containerName="collect-profiles" Sep 30 14:19:10 crc kubenswrapper[4783]: I0930 14:19:10.758077 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a311fff-dc1c-46a7-9e74-ee4c03b630b2" 
containerName="collect-profiles" Sep 30 14:19:10 crc kubenswrapper[4783]: I0930 14:19:10.759092 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9jkbq" Sep 30 14:19:10 crc kubenswrapper[4783]: I0930 14:19:10.780434 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9jkbq"] Sep 30 14:19:10 crc kubenswrapper[4783]: I0930 14:19:10.867844 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7hlx\" (UniqueName: \"kubernetes.io/projected/39e74efa-c715-4736-8ed9-98a8ae696f8f-kube-api-access-c7hlx\") pod \"redhat-operators-9jkbq\" (UID: \"39e74efa-c715-4736-8ed9-98a8ae696f8f\") " pod="openshift-marketplace/redhat-operators-9jkbq" Sep 30 14:19:10 crc kubenswrapper[4783]: I0930 14:19:10.867941 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39e74efa-c715-4736-8ed9-98a8ae696f8f-utilities\") pod \"redhat-operators-9jkbq\" (UID: \"39e74efa-c715-4736-8ed9-98a8ae696f8f\") " pod="openshift-marketplace/redhat-operators-9jkbq" Sep 30 14:19:10 crc kubenswrapper[4783]: I0930 14:19:10.867962 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39e74efa-c715-4736-8ed9-98a8ae696f8f-catalog-content\") pod \"redhat-operators-9jkbq\" (UID: \"39e74efa-c715-4736-8ed9-98a8ae696f8f\") " pod="openshift-marketplace/redhat-operators-9jkbq" Sep 30 14:19:10 crc kubenswrapper[4783]: I0930 14:19:10.969916 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39e74efa-c715-4736-8ed9-98a8ae696f8f-utilities\") pod \"redhat-operators-9jkbq\" (UID: \"39e74efa-c715-4736-8ed9-98a8ae696f8f\") " pod="openshift-marketplace/redhat-operators-9jkbq" Sep 30 14:19:10 crc kubenswrapper[4783]: I0930 14:19:10.969959 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39e74efa-c715-4736-8ed9-98a8ae696f8f-catalog-content\") pod \"redhat-operators-9jkbq\" (UID: \"39e74efa-c715-4736-8ed9-98a8ae696f8f\") " pod="openshift-marketplace/redhat-operators-9jkbq" Sep 30 14:19:10 crc kubenswrapper[4783]: I0930 14:19:10.970044 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7hlx\" (UniqueName: \"kubernetes.io/projected/39e74efa-c715-4736-8ed9-98a8ae696f8f-kube-api-access-c7hlx\") pod \"redhat-operators-9jkbq\" (UID: \"39e74efa-c715-4736-8ed9-98a8ae696f8f\") " pod="openshift-marketplace/redhat-operators-9jkbq" Sep 30 14:19:10 crc kubenswrapper[4783]: I0930 14:19:10.970600 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39e74efa-c715-4736-8ed9-98a8ae696f8f-utilities\") pod \"redhat-operators-9jkbq\" (UID: \"39e74efa-c715-4736-8ed9-98a8ae696f8f\") " pod="openshift-marketplace/redhat-operators-9jkbq" Sep 30 14:19:10 crc kubenswrapper[4783]: I0930 14:19:10.970641 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39e74efa-c715-4736-8ed9-98a8ae696f8f-catalog-content\") pod \"redhat-operators-9jkbq\" (UID: \"39e74efa-c715-4736-8ed9-98a8ae696f8f\") " pod="openshift-marketplace/redhat-operators-9jkbq" Sep 
30 14:19:10 crc kubenswrapper[4783]: I0930 14:19:10.989518 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7hlx\" (UniqueName: \"kubernetes.io/projected/39e74efa-c715-4736-8ed9-98a8ae696f8f-kube-api-access-c7hlx\") pod \"redhat-operators-9jkbq\" (UID: \"39e74efa-c715-4736-8ed9-98a8ae696f8f\") " pod="openshift-marketplace/redhat-operators-9jkbq" Sep 30 14:19:11 crc kubenswrapper[4783]: I0930 14:19:11.096526 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9jkbq" Sep 30 14:19:11 crc kubenswrapper[4783]: I0930 14:19:11.319425 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9jkbq"] Sep 30 14:19:12 crc kubenswrapper[4783]: I0930 14:19:12.179402 4783 generic.go:334] "Generic (PLEG): container finished" podID="39e74efa-c715-4736-8ed9-98a8ae696f8f" containerID="7e5673f63158fb18197f273d2b62ecab2eff8fb5dd6d08c0308c6d678cb98a2b" exitCode=0 Sep 30 14:19:12 crc kubenswrapper[4783]: I0930 14:19:12.179500 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9jkbq" event={"ID":"39e74efa-c715-4736-8ed9-98a8ae696f8f","Type":"ContainerDied","Data":"7e5673f63158fb18197f273d2b62ecab2eff8fb5dd6d08c0308c6d678cb98a2b"} Sep 30 14:19:12 crc kubenswrapper[4783]: I0930 14:19:12.179830 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9jkbq" event={"ID":"39e74efa-c715-4736-8ed9-98a8ae696f8f","Type":"ContainerStarted","Data":"09b83529071fdaf0c717d72723c9ff054fa0b62aba15881a9dbb0b4a382c6c49"} Sep 30 14:19:12 crc kubenswrapper[4783]: I0930 14:19:12.183355 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 14:19:20 crc kubenswrapper[4783]: I0930 14:19:20.246907 4783 generic.go:334] "Generic (PLEG): container finished" podID="39e74efa-c715-4736-8ed9-98a8ae696f8f" containerID="fbc0bc36a52bbacaf718db29571a385c747763c6205e34111f483a760565cefc" exitCode=0 Sep 30 14:19:20 crc kubenswrapper[4783]: I0930 14:19:20.247037 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9jkbq" event={"ID":"39e74efa-c715-4736-8ed9-98a8ae696f8f","Type":"ContainerDied","Data":"fbc0bc36a52bbacaf718db29571a385c747763c6205e34111f483a760565cefc"} Sep 30 14:19:24 crc kubenswrapper[4783]: I0930 14:19:24.278145 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9jkbq" event={"ID":"39e74efa-c715-4736-8ed9-98a8ae696f8f","Type":"ContainerStarted","Data":"c759ca5898933d2297654911ce1f51bae50013aa3f144393db44a901a4d3b552"} Sep 30 14:19:25 crc kubenswrapper[4783]: I0930 14:19:25.307083 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9jkbq" podStartSLOduration=3.402354429 podStartE2EDuration="15.307065977s" podCreationTimestamp="2025-09-30 14:19:10 +0000 UTC" firstStartedPulling="2025-09-30 14:19:12.182987246 +0000 UTC m=+2652.114453553" lastFinishedPulling="2025-09-30 14:19:24.087698794 +0000 UTC m=+2664.019165101" observedRunningTime="2025-09-30 14:19:25.305886009 +0000 UTC m=+2665.237352326" watchObservedRunningTime="2025-09-30 14:19:25.307065977 +0000 UTC m=+2665.238532284" Sep 30 14:19:31 crc kubenswrapper[4783]: I0930 14:19:31.097784 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9jkbq" Sep 30 14:19:31 crc 
kubenswrapper[4783]: I0930 14:19:31.099029 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9jkbq" Sep 30 14:19:31 crc kubenswrapper[4783]: I0930 14:19:31.138209 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9jkbq" Sep 30 14:19:31 crc kubenswrapper[4783]: I0930 14:19:31.377145 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9jkbq" Sep 30 14:19:31 crc kubenswrapper[4783]: I0930 14:19:31.470777 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9jkbq"] Sep 30 14:19:31 crc kubenswrapper[4783]: I0930 14:19:31.516377 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mn9rq"] Sep 30 14:19:31 crc kubenswrapper[4783]: I0930 14:19:31.516777 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mn9rq" podUID="25ae6168-8819-424c-a382-02f0c3d7b386" containerName="registry-server" containerID="cri-o://440bbc8ac09316051df3f8d196a6384d435c34700064872cdc417963c4e1af75" gracePeriod=2 Sep 30 14:19:31 crc kubenswrapper[4783]: I0930 14:19:31.972535 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mn9rq" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.103045 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25ae6168-8819-424c-a382-02f0c3d7b386-catalog-content\") pod \"25ae6168-8819-424c-a382-02f0c3d7b386\" (UID: \"25ae6168-8819-424c-a382-02f0c3d7b386\") " Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.103314 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbsqz\" (UniqueName: \"kubernetes.io/projected/25ae6168-8819-424c-a382-02f0c3d7b386-kube-api-access-gbsqz\") pod \"25ae6168-8819-424c-a382-02f0c3d7b386\" (UID: \"25ae6168-8819-424c-a382-02f0c3d7b386\") " Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.103402 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25ae6168-8819-424c-a382-02f0c3d7b386-utilities\") pod \"25ae6168-8819-424c-a382-02f0c3d7b386\" (UID: \"25ae6168-8819-424c-a382-02f0c3d7b386\") " Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.104097 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25ae6168-8819-424c-a382-02f0c3d7b386-utilities" (OuterVolumeSpecName: "utilities") pod "25ae6168-8819-424c-a382-02f0c3d7b386" (UID: "25ae6168-8819-424c-a382-02f0c3d7b386"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.110561 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25ae6168-8819-424c-a382-02f0c3d7b386-kube-api-access-gbsqz" (OuterVolumeSpecName: "kube-api-access-gbsqz") pod "25ae6168-8819-424c-a382-02f0c3d7b386" (UID: "25ae6168-8819-424c-a382-02f0c3d7b386"). InnerVolumeSpecName "kube-api-access-gbsqz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.199512 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25ae6168-8819-424c-a382-02f0c3d7b386-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "25ae6168-8819-424c-a382-02f0c3d7b386" (UID: "25ae6168-8819-424c-a382-02f0c3d7b386"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.212028 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbsqz\" (UniqueName: \"kubernetes.io/projected/25ae6168-8819-424c-a382-02f0c3d7b386-kube-api-access-gbsqz\") on node \"crc\" DevicePath \"\"" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.212078 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25ae6168-8819-424c-a382-02f0c3d7b386-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.212105 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25ae6168-8819-424c-a382-02f0c3d7b386-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.337772 4783 generic.go:334] "Generic (PLEG): container finished" podID="25ae6168-8819-424c-a382-02f0c3d7b386" containerID="440bbc8ac09316051df3f8d196a6384d435c34700064872cdc417963c4e1af75" exitCode=0 Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.337904 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mn9rq" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.337955 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mn9rq" event={"ID":"25ae6168-8819-424c-a382-02f0c3d7b386","Type":"ContainerDied","Data":"440bbc8ac09316051df3f8d196a6384d435c34700064872cdc417963c4e1af75"} Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.337998 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mn9rq" event={"ID":"25ae6168-8819-424c-a382-02f0c3d7b386","Type":"ContainerDied","Data":"5923a981d708a2cb5d546b4c220178f32b69e1d33feac0d2637f72fd819bea57"} Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.338015 4783 scope.go:117] "RemoveContainer" containerID="440bbc8ac09316051df3f8d196a6384d435c34700064872cdc417963c4e1af75" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.357994 4783 scope.go:117] "RemoveContainer" containerID="d12591c06734399d01e45499801946cd002c136762a7ecac7c764f0f5ab08911" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.375807 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mn9rq"] Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.385371 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mn9rq"] Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.388444 4783 scope.go:117] "RemoveContainer" containerID="9f7cbb9f90b7b76b8d50eea6864d6417824bf420930ecf386960528cb10f064e" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.416361 4783 scope.go:117] "RemoveContainer" containerID="440bbc8ac09316051df3f8d196a6384d435c34700064872cdc417963c4e1af75" Sep 30 14:19:32 crc kubenswrapper[4783]: E0930 14:19:32.416835 4783 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"440bbc8ac09316051df3f8d196a6384d435c34700064872cdc417963c4e1af75\": container with ID starting with 440bbc8ac09316051df3f8d196a6384d435c34700064872cdc417963c4e1af75 not found: ID does not exist" containerID="440bbc8ac09316051df3f8d196a6384d435c34700064872cdc417963c4e1af75" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.416871 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"440bbc8ac09316051df3f8d196a6384d435c34700064872cdc417963c4e1af75"} err="failed to get container status \"440bbc8ac09316051df3f8d196a6384d435c34700064872cdc417963c4e1af75\": rpc error: code = NotFound desc = could not find container \"440bbc8ac09316051df3f8d196a6384d435c34700064872cdc417963c4e1af75\": container with ID starting with 440bbc8ac09316051df3f8d196a6384d435c34700064872cdc417963c4e1af75 not found: ID does not exist" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.416902 4783 scope.go:117] "RemoveContainer" containerID="d12591c06734399d01e45499801946cd002c136762a7ecac7c764f0f5ab08911" Sep 30 14:19:32 crc kubenswrapper[4783]: E0930 14:19:32.417337 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d12591c06734399d01e45499801946cd002c136762a7ecac7c764f0f5ab08911\": container with ID starting with d12591c06734399d01e45499801946cd002c136762a7ecac7c764f0f5ab08911 not found: ID does not exist" containerID="d12591c06734399d01e45499801946cd002c136762a7ecac7c764f0f5ab08911" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.417365 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d12591c06734399d01e45499801946cd002c136762a7ecac7c764f0f5ab08911"} err="failed to get container status \"d12591c06734399d01e45499801946cd002c136762a7ecac7c764f0f5ab08911\": rpc error: code = NotFound desc = could not find container \"d12591c06734399d01e45499801946cd002c136762a7ecac7c764f0f5ab08911\": container with ID starting with d12591c06734399d01e45499801946cd002c136762a7ecac7c764f0f5ab08911 not found: ID does not exist" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.417383 4783 scope.go:117] "RemoveContainer" containerID="9f7cbb9f90b7b76b8d50eea6864d6417824bf420930ecf386960528cb10f064e" Sep 30 14:19:32 crc kubenswrapper[4783]: E0930 14:19:32.417613 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f7cbb9f90b7b76b8d50eea6864d6417824bf420930ecf386960528cb10f064e\": container with ID starting with 9f7cbb9f90b7b76b8d50eea6864d6417824bf420930ecf386960528cb10f064e not found: ID does not exist" containerID="9f7cbb9f90b7b76b8d50eea6864d6417824bf420930ecf386960528cb10f064e" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.417645 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f7cbb9f90b7b76b8d50eea6864d6417824bf420930ecf386960528cb10f064e"} err="failed to get container status \"9f7cbb9f90b7b76b8d50eea6864d6417824bf420930ecf386960528cb10f064e\": rpc error: code = NotFound desc = could not find container \"9f7cbb9f90b7b76b8d50eea6864d6417824bf420930ecf386960528cb10f064e\": container with ID starting with 9f7cbb9f90b7b76b8d50eea6864d6417824bf420930ecf386960528cb10f064e not found: ID does not exist" Sep 30 14:19:32 crc kubenswrapper[4783]: I0930 14:19:32.853303 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="25ae6168-8819-424c-a382-02f0c3d7b386" path="/var/lib/kubelet/pods/25ae6168-8819-424c-a382-02f0c3d7b386/volumes" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.134733 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5sclz"] Sep 30 14:20:26 crc kubenswrapper[4783]: E0930 14:20:26.135698 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25ae6168-8819-424c-a382-02f0c3d7b386" containerName="extract-content" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.135715 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="25ae6168-8819-424c-a382-02f0c3d7b386" containerName="extract-content" Sep 30 14:20:26 crc kubenswrapper[4783]: E0930 14:20:26.135741 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25ae6168-8819-424c-a382-02f0c3d7b386" containerName="extract-utilities" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.135750 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="25ae6168-8819-424c-a382-02f0c3d7b386" containerName="extract-utilities" Sep 30 14:20:26 crc kubenswrapper[4783]: E0930 14:20:26.135763 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25ae6168-8819-424c-a382-02f0c3d7b386" containerName="registry-server" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.135771 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="25ae6168-8819-424c-a382-02f0c3d7b386" containerName="registry-server" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.135965 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="25ae6168-8819-424c-a382-02f0c3d7b386" containerName="registry-server" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.137292 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.152628 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5sclz"] Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.280128 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f04d7276-9231-4f78-b3d2-f2915b40b154-utilities\") pod \"certified-operators-5sclz\" (UID: \"f04d7276-9231-4f78-b3d2-f2915b40b154\") " pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.280312 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f04d7276-9231-4f78-b3d2-f2915b40b154-catalog-content\") pod \"certified-operators-5sclz\" (UID: \"f04d7276-9231-4f78-b3d2-f2915b40b154\") " pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.280375 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltwtz\" (UniqueName: \"kubernetes.io/projected/f04d7276-9231-4f78-b3d2-f2915b40b154-kube-api-access-ltwtz\") pod \"certified-operators-5sclz\" (UID: \"f04d7276-9231-4f78-b3d2-f2915b40b154\") " pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.382142 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f04d7276-9231-4f78-b3d2-f2915b40b154-catalog-content\") pod \"certified-operators-5sclz\" (UID: \"f04d7276-9231-4f78-b3d2-f2915b40b154\") " pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.382210 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltwtz\" (UniqueName: \"kubernetes.io/projected/f04d7276-9231-4f78-b3d2-f2915b40b154-kube-api-access-ltwtz\") pod \"certified-operators-5sclz\" (UID: \"f04d7276-9231-4f78-b3d2-f2915b40b154\") " pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.382327 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f04d7276-9231-4f78-b3d2-f2915b40b154-utilities\") pod \"certified-operators-5sclz\" (UID: \"f04d7276-9231-4f78-b3d2-f2915b40b154\") " pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.382810 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f04d7276-9231-4f78-b3d2-f2915b40b154-catalog-content\") pod \"certified-operators-5sclz\" (UID: \"f04d7276-9231-4f78-b3d2-f2915b40b154\") " pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.382830 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f04d7276-9231-4f78-b3d2-f2915b40b154-utilities\") pod \"certified-operators-5sclz\" (UID: \"f04d7276-9231-4f78-b3d2-f2915b40b154\") " pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.401201 4783 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ltwtz\" (UniqueName: \"kubernetes.io/projected/f04d7276-9231-4f78-b3d2-f2915b40b154-kube-api-access-ltwtz\") pod \"certified-operators-5sclz\" (UID: \"f04d7276-9231-4f78-b3d2-f2915b40b154\") " pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.462744 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:26 crc kubenswrapper[4783]: I0930 14:20:26.925525 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5sclz"] Sep 30 14:20:27 crc kubenswrapper[4783]: I0930 14:20:27.758034 4783 generic.go:334] "Generic (PLEG): container finished" podID="f04d7276-9231-4f78-b3d2-f2915b40b154" containerID="0c445eff62eabc0bd79fdb31ee0e80583b7f2d9047dff308ef703288ccd05efd" exitCode=0 Sep 30 14:20:27 crc kubenswrapper[4783]: I0930 14:20:27.758077 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5sclz" event={"ID":"f04d7276-9231-4f78-b3d2-f2915b40b154","Type":"ContainerDied","Data":"0c445eff62eabc0bd79fdb31ee0e80583b7f2d9047dff308ef703288ccd05efd"} Sep 30 14:20:27 crc kubenswrapper[4783]: I0930 14:20:27.758105 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5sclz" event={"ID":"f04d7276-9231-4f78-b3d2-f2915b40b154","Type":"ContainerStarted","Data":"2a14f05d4e9cf8fc610636466aaf33a5d0c77d83294a7d2e79dd8be364aba477"} Sep 30 14:20:28 crc kubenswrapper[4783]: I0930 14:20:28.772272 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5sclz" event={"ID":"f04d7276-9231-4f78-b3d2-f2915b40b154","Type":"ContainerStarted","Data":"7fa3baaf7b4fb9c2ca87d6a8c79a60ccb271a136da0d4da2c8da64db74bd3ad9"} Sep 30 14:20:29 crc kubenswrapper[4783]: I0930 14:20:29.786504 4783 generic.go:334] "Generic (PLEG): container finished" podID="f04d7276-9231-4f78-b3d2-f2915b40b154" containerID="7fa3baaf7b4fb9c2ca87d6a8c79a60ccb271a136da0d4da2c8da64db74bd3ad9" exitCode=0 Sep 30 14:20:29 crc kubenswrapper[4783]: I0930 14:20:29.786561 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5sclz" event={"ID":"f04d7276-9231-4f78-b3d2-f2915b40b154","Type":"ContainerDied","Data":"7fa3baaf7b4fb9c2ca87d6a8c79a60ccb271a136da0d4da2c8da64db74bd3ad9"} Sep 30 14:20:30 crc kubenswrapper[4783]: I0930 14:20:30.796285 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5sclz" event={"ID":"f04d7276-9231-4f78-b3d2-f2915b40b154","Type":"ContainerStarted","Data":"939d4b3a458f5b32643cc950ead24805aa117a0015e87a562c04dca4de36ca98"} Sep 30 14:20:30 crc kubenswrapper[4783]: I0930 14:20:30.813780 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5sclz" podStartSLOduration=2.254002339 podStartE2EDuration="4.81376065s" podCreationTimestamp="2025-09-30 14:20:26 +0000 UTC" firstStartedPulling="2025-09-30 14:20:27.763553771 +0000 UTC m=+2727.695020078" lastFinishedPulling="2025-09-30 14:20:30.323312072 +0000 UTC m=+2730.254778389" observedRunningTime="2025-09-30 14:20:30.809961969 +0000 UTC m=+2730.741428276" watchObservedRunningTime="2025-09-30 14:20:30.81376065 +0000 UTC m=+2730.745226967" Sep 30 14:20:36 crc kubenswrapper[4783]: I0930 14:20:36.463496 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:36 crc kubenswrapper[4783]: I0930 14:20:36.463767 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:36 crc kubenswrapper[4783]: I0930 14:20:36.509898 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:36 crc kubenswrapper[4783]: I0930 14:20:36.876464 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:36 crc kubenswrapper[4783]: I0930 14:20:36.934245 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5sclz"] Sep 30 14:20:38 crc kubenswrapper[4783]: I0930 14:20:38.848764 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5sclz" podUID="f04d7276-9231-4f78-b3d2-f2915b40b154" containerName="registry-server" containerID="cri-o://939d4b3a458f5b32643cc950ead24805aa117a0015e87a562c04dca4de36ca98" gracePeriod=2 Sep 30 14:20:39 crc kubenswrapper[4783]: I0930 14:20:39.861178 4783 generic.go:334] "Generic (PLEG): container finished" podID="f04d7276-9231-4f78-b3d2-f2915b40b154" containerID="939d4b3a458f5b32643cc950ead24805aa117a0015e87a562c04dca4de36ca98" exitCode=0 Sep 30 14:20:39 crc kubenswrapper[4783]: I0930 14:20:39.861232 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5sclz" event={"ID":"f04d7276-9231-4f78-b3d2-f2915b40b154","Type":"ContainerDied","Data":"939d4b3a458f5b32643cc950ead24805aa117a0015e87a562c04dca4de36ca98"} Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.651044 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.686516 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltwtz\" (UniqueName: \"kubernetes.io/projected/f04d7276-9231-4f78-b3d2-f2915b40b154-kube-api-access-ltwtz\") pod \"f04d7276-9231-4f78-b3d2-f2915b40b154\" (UID: \"f04d7276-9231-4f78-b3d2-f2915b40b154\") " Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.686597 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f04d7276-9231-4f78-b3d2-f2915b40b154-utilities\") pod \"f04d7276-9231-4f78-b3d2-f2915b40b154\" (UID: \"f04d7276-9231-4f78-b3d2-f2915b40b154\") " Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.686625 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f04d7276-9231-4f78-b3d2-f2915b40b154-catalog-content\") pod \"f04d7276-9231-4f78-b3d2-f2915b40b154\" (UID: \"f04d7276-9231-4f78-b3d2-f2915b40b154\") " Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.687936 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f04d7276-9231-4f78-b3d2-f2915b40b154-utilities" (OuterVolumeSpecName: "utilities") pod "f04d7276-9231-4f78-b3d2-f2915b40b154" (UID: "f04d7276-9231-4f78-b3d2-f2915b40b154"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.692162 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f04d7276-9231-4f78-b3d2-f2915b40b154-kube-api-access-ltwtz" (OuterVolumeSpecName: "kube-api-access-ltwtz") pod "f04d7276-9231-4f78-b3d2-f2915b40b154" (UID: "f04d7276-9231-4f78-b3d2-f2915b40b154"). InnerVolumeSpecName "kube-api-access-ltwtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.730024 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f04d7276-9231-4f78-b3d2-f2915b40b154-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f04d7276-9231-4f78-b3d2-f2915b40b154" (UID: "f04d7276-9231-4f78-b3d2-f2915b40b154"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.787892 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltwtz\" (UniqueName: \"kubernetes.io/projected/f04d7276-9231-4f78-b3d2-f2915b40b154-kube-api-access-ltwtz\") on node \"crc\" DevicePath \"\"" Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.788368 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f04d7276-9231-4f78-b3d2-f2915b40b154-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.788495 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f04d7276-9231-4f78-b3d2-f2915b40b154-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.871584 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5sclz" event={"ID":"f04d7276-9231-4f78-b3d2-f2915b40b154","Type":"ContainerDied","Data":"2a14f05d4e9cf8fc610636466aaf33a5d0c77d83294a7d2e79dd8be364aba477"} Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.871632 4783 scope.go:117] "RemoveContainer" containerID="939d4b3a458f5b32643cc950ead24805aa117a0015e87a562c04dca4de36ca98" Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.871772 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5sclz" Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.898142 4783 scope.go:117] "RemoveContainer" containerID="7fa3baaf7b4fb9c2ca87d6a8c79a60ccb271a136da0d4da2c8da64db74bd3ad9" Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.899259 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5sclz"] Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.905014 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5sclz"] Sep 30 14:20:40 crc kubenswrapper[4783]: I0930 14:20:40.925002 4783 scope.go:117] "RemoveContainer" containerID="0c445eff62eabc0bd79fdb31ee0e80583b7f2d9047dff308ef703288ccd05efd" Sep 30 14:20:42 crc kubenswrapper[4783]: I0930 14:20:42.855334 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f04d7276-9231-4f78-b3d2-f2915b40b154" path="/var/lib/kubelet/pods/f04d7276-9231-4f78-b3d2-f2915b40b154/volumes" Sep 30 14:21:07 crc kubenswrapper[4783]: I0930 14:21:07.674106 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:21:07 crc kubenswrapper[4783]: I0930 14:21:07.674964 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:21:37 crc kubenswrapper[4783]: I0930 14:21:37.674092 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:21:37 crc kubenswrapper[4783]: I0930 14:21:37.674665 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:22:07 crc kubenswrapper[4783]: I0930 14:22:07.674662 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:22:07 crc kubenswrapper[4783]: I0930 14:22:07.676343 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:22:07 crc kubenswrapper[4783]: I0930 14:22:07.676458 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 14:22:07 crc kubenswrapper[4783]: I0930 14:22:07.677153 4783 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ce7050e6973576d4e40f4e8bad410802b76630552004372d2c16c0e7cf2d12ab"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 14:22:07 crc kubenswrapper[4783]: I0930 14:22:07.677312 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://ce7050e6973576d4e40f4e8bad410802b76630552004372d2c16c0e7cf2d12ab" gracePeriod=600 Sep 30 14:22:08 crc kubenswrapper[4783]: I0930 14:22:08.582437 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="ce7050e6973576d4e40f4e8bad410802b76630552004372d2c16c0e7cf2d12ab" exitCode=0 Sep 30 14:22:08 crc kubenswrapper[4783]: I0930 14:22:08.582513 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"ce7050e6973576d4e40f4e8bad410802b76630552004372d2c16c0e7cf2d12ab"} Sep 30 14:22:08 crc kubenswrapper[4783]: I0930 14:22:08.582712 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7"} Sep 30 14:22:08 crc kubenswrapper[4783]: I0930 14:22:08.582734 4783 scope.go:117] "RemoveContainer" containerID="c2a1438d8e733476bd9e96201ea219a4b2a632af92206e4b2bae80654871d4aa" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.150342 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b4664"] Sep 30 14:23:04 crc kubenswrapper[4783]: E0930 14:23:04.151360 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f04d7276-9231-4f78-b3d2-f2915b40b154" containerName="extract-utilities" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.151380 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="f04d7276-9231-4f78-b3d2-f2915b40b154" containerName="extract-utilities" Sep 30 14:23:04 crc kubenswrapper[4783]: E0930 14:23:04.151396 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f04d7276-9231-4f78-b3d2-f2915b40b154" containerName="extract-content" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.151404 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="f04d7276-9231-4f78-b3d2-f2915b40b154" containerName="extract-content" Sep 30 14:23:04 crc kubenswrapper[4783]: E0930 14:23:04.151420 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f04d7276-9231-4f78-b3d2-f2915b40b154" containerName="registry-server" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.151427 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="f04d7276-9231-4f78-b3d2-f2915b40b154" containerName="registry-server" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.151606 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="f04d7276-9231-4f78-b3d2-f2915b40b154" containerName="registry-server" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.152952 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.165517 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b4664"] Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.284700 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsv2b\" (UniqueName: \"kubernetes.io/projected/45026355-351d-4e48-8890-e67333c41a4a-kube-api-access-lsv2b\") pod \"community-operators-b4664\" (UID: \"45026355-351d-4e48-8890-e67333c41a4a\") " pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.284955 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45026355-351d-4e48-8890-e67333c41a4a-catalog-content\") pod \"community-operators-b4664\" (UID: \"45026355-351d-4e48-8890-e67333c41a4a\") " pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.285115 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45026355-351d-4e48-8890-e67333c41a4a-utilities\") pod \"community-operators-b4664\" (UID: \"45026355-351d-4e48-8890-e67333c41a4a\") " pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.386461 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45026355-351d-4e48-8890-e67333c41a4a-catalog-content\") pod \"community-operators-b4664\" (UID: \"45026355-351d-4e48-8890-e67333c41a4a\") " pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.386549 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45026355-351d-4e48-8890-e67333c41a4a-utilities\") pod \"community-operators-b4664\" (UID: \"45026355-351d-4e48-8890-e67333c41a4a\") " pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.386619 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsv2b\" (UniqueName: \"kubernetes.io/projected/45026355-351d-4e48-8890-e67333c41a4a-kube-api-access-lsv2b\") pod \"community-operators-b4664\" (UID: \"45026355-351d-4e48-8890-e67333c41a4a\") " pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.387192 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45026355-351d-4e48-8890-e67333c41a4a-catalog-content\") pod \"community-operators-b4664\" (UID: \"45026355-351d-4e48-8890-e67333c41a4a\") " pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.387236 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45026355-351d-4e48-8890-e67333c41a4a-utilities\") pod \"community-operators-b4664\" (UID: \"45026355-351d-4e48-8890-e67333c41a4a\") " pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.418130 4783 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lsv2b\" (UniqueName: \"kubernetes.io/projected/45026355-351d-4e48-8890-e67333c41a4a-kube-api-access-lsv2b\") pod \"community-operators-b4664\" (UID: \"45026355-351d-4e48-8890-e67333c41a4a\") " pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.489530 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:04 crc kubenswrapper[4783]: I0930 14:23:04.753710 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b4664"] Sep 30 14:23:05 crc kubenswrapper[4783]: I0930 14:23:05.029011 4783 generic.go:334] "Generic (PLEG): container finished" podID="45026355-351d-4e48-8890-e67333c41a4a" containerID="e6d7a67ea469e45b8e66cde0b84afb41321efc0c0355fd13345dd9fdfcd01f06" exitCode=0 Sep 30 14:23:05 crc kubenswrapper[4783]: I0930 14:23:05.029355 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b4664" event={"ID":"45026355-351d-4e48-8890-e67333c41a4a","Type":"ContainerDied","Data":"e6d7a67ea469e45b8e66cde0b84afb41321efc0c0355fd13345dd9fdfcd01f06"} Sep 30 14:23:05 crc kubenswrapper[4783]: I0930 14:23:05.029390 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b4664" event={"ID":"45026355-351d-4e48-8890-e67333c41a4a","Type":"ContainerStarted","Data":"3e283190e7c3d16d58ee718a52455c0b55b1ca6f145a86993ffc30b4a2d79ce9"} Sep 30 14:23:06 crc kubenswrapper[4783]: I0930 14:23:06.039466 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b4664" event={"ID":"45026355-351d-4e48-8890-e67333c41a4a","Type":"ContainerStarted","Data":"a6d035af531ad21280e814aae840448ec5beaefa17ca966cfe287ff6993c26ea"} Sep 30 14:23:07 crc kubenswrapper[4783]: I0930 14:23:07.048384 4783 generic.go:334] "Generic (PLEG): container finished" podID="45026355-351d-4e48-8890-e67333c41a4a" containerID="a6d035af531ad21280e814aae840448ec5beaefa17ca966cfe287ff6993c26ea" exitCode=0 Sep 30 14:23:07 crc kubenswrapper[4783]: I0930 14:23:07.048480 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b4664" event={"ID":"45026355-351d-4e48-8890-e67333c41a4a","Type":"ContainerDied","Data":"a6d035af531ad21280e814aae840448ec5beaefa17ca966cfe287ff6993c26ea"} Sep 30 14:23:07 crc kubenswrapper[4783]: I0930 14:23:07.048734 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b4664" event={"ID":"45026355-351d-4e48-8890-e67333c41a4a","Type":"ContainerStarted","Data":"4806d9a240eadc18c943432732fac700d1ca2d2c9d92fe01df5f655fa0555f18"} Sep 30 14:23:07 crc kubenswrapper[4783]: I0930 14:23:07.087292 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b4664" podStartSLOduration=1.644012766 podStartE2EDuration="3.087269927s" podCreationTimestamp="2025-09-30 14:23:04 +0000 UTC" firstStartedPulling="2025-09-30 14:23:05.031614472 +0000 UTC m=+2884.963080789" lastFinishedPulling="2025-09-30 14:23:06.474871633 +0000 UTC m=+2886.406337950" observedRunningTime="2025-09-30 14:23:07.078846705 +0000 UTC m=+2887.010313012" watchObservedRunningTime="2025-09-30 14:23:07.087269927 +0000 UTC m=+2887.018736234" Sep 30 14:23:14 crc kubenswrapper[4783]: I0930 14:23:14.489939 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:14 crc kubenswrapper[4783]: I0930 14:23:14.490855 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:14 crc kubenswrapper[4783]: I0930 14:23:14.539581 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:15 crc kubenswrapper[4783]: I0930 14:23:15.159272 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:17 crc kubenswrapper[4783]: I0930 14:23:17.540338 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b4664"] Sep 30 14:23:17 crc kubenswrapper[4783]: I0930 14:23:17.540862 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-b4664" podUID="45026355-351d-4e48-8890-e67333c41a4a" containerName="registry-server" containerID="cri-o://4806d9a240eadc18c943432732fac700d1ca2d2c9d92fe01df5f655fa0555f18" gracePeriod=2 Sep 30 14:23:17 crc kubenswrapper[4783]: I0930 14:23:17.959500 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:17 crc kubenswrapper[4783]: I0930 14:23:17.986249 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45026355-351d-4e48-8890-e67333c41a4a-utilities\") pod \"45026355-351d-4e48-8890-e67333c41a4a\" (UID: \"45026355-351d-4e48-8890-e67333c41a4a\") " Sep 30 14:23:17 crc kubenswrapper[4783]: I0930 14:23:17.986329 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45026355-351d-4e48-8890-e67333c41a4a-catalog-content\") pod \"45026355-351d-4e48-8890-e67333c41a4a\" (UID: \"45026355-351d-4e48-8890-e67333c41a4a\") " Sep 30 14:23:17 crc kubenswrapper[4783]: I0930 14:23:17.986354 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsv2b\" (UniqueName: \"kubernetes.io/projected/45026355-351d-4e48-8890-e67333c41a4a-kube-api-access-lsv2b\") pod \"45026355-351d-4e48-8890-e67333c41a4a\" (UID: \"45026355-351d-4e48-8890-e67333c41a4a\") " Sep 30 14:23:17 crc kubenswrapper[4783]: I0930 14:23:17.987568 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45026355-351d-4e48-8890-e67333c41a4a-utilities" (OuterVolumeSpecName: "utilities") pod "45026355-351d-4e48-8890-e67333c41a4a" (UID: "45026355-351d-4e48-8890-e67333c41a4a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:23:17 crc kubenswrapper[4783]: I0930 14:23:17.993381 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45026355-351d-4e48-8890-e67333c41a4a-kube-api-access-lsv2b" (OuterVolumeSpecName: "kube-api-access-lsv2b") pod "45026355-351d-4e48-8890-e67333c41a4a" (UID: "45026355-351d-4e48-8890-e67333c41a4a"). InnerVolumeSpecName "kube-api-access-lsv2b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.032689 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45026355-351d-4e48-8890-e67333c41a4a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "45026355-351d-4e48-8890-e67333c41a4a" (UID: "45026355-351d-4e48-8890-e67333c41a4a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.087412 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsv2b\" (UniqueName: \"kubernetes.io/projected/45026355-351d-4e48-8890-e67333c41a4a-kube-api-access-lsv2b\") on node \"crc\" DevicePath \"\"" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.087442 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45026355-351d-4e48-8890-e67333c41a4a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.087451 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45026355-351d-4e48-8890-e67333c41a4a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.159741 4783 generic.go:334] "Generic (PLEG): container finished" podID="45026355-351d-4e48-8890-e67333c41a4a" containerID="4806d9a240eadc18c943432732fac700d1ca2d2c9d92fe01df5f655fa0555f18" exitCode=0 Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.159803 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b4664" event={"ID":"45026355-351d-4e48-8890-e67333c41a4a","Type":"ContainerDied","Data":"4806d9a240eadc18c943432732fac700d1ca2d2c9d92fe01df5f655fa0555f18"} Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.159828 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b4664" event={"ID":"45026355-351d-4e48-8890-e67333c41a4a","Type":"ContainerDied","Data":"3e283190e7c3d16d58ee718a52455c0b55b1ca6f145a86993ffc30b4a2d79ce9"} Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.159868 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b4664" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.159876 4783 scope.go:117] "RemoveContainer" containerID="4806d9a240eadc18c943432732fac700d1ca2d2c9d92fe01df5f655fa0555f18" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.187183 4783 scope.go:117] "RemoveContainer" containerID="a6d035af531ad21280e814aae840448ec5beaefa17ca966cfe287ff6993c26ea" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.219476 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b4664"] Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.223481 4783 scope.go:117] "RemoveContainer" containerID="e6d7a67ea469e45b8e66cde0b84afb41321efc0c0355fd13345dd9fdfcd01f06" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.228557 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-b4664"] Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.241948 4783 scope.go:117] "RemoveContainer" containerID="4806d9a240eadc18c943432732fac700d1ca2d2c9d92fe01df5f655fa0555f18" Sep 30 14:23:18 crc kubenswrapper[4783]: E0930 14:23:18.242334 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4806d9a240eadc18c943432732fac700d1ca2d2c9d92fe01df5f655fa0555f18\": container with ID starting with 4806d9a240eadc18c943432732fac700d1ca2d2c9d92fe01df5f655fa0555f18 not found: ID does not exist" containerID="4806d9a240eadc18c943432732fac700d1ca2d2c9d92fe01df5f655fa0555f18" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.242509 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4806d9a240eadc18c943432732fac700d1ca2d2c9d92fe01df5f655fa0555f18"} err="failed to get container status \"4806d9a240eadc18c943432732fac700d1ca2d2c9d92fe01df5f655fa0555f18\": rpc error: code = NotFound desc = could not find container \"4806d9a240eadc18c943432732fac700d1ca2d2c9d92fe01df5f655fa0555f18\": container with ID starting with 4806d9a240eadc18c943432732fac700d1ca2d2c9d92fe01df5f655fa0555f18 not found: ID does not exist" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.242622 4783 scope.go:117] "RemoveContainer" containerID="a6d035af531ad21280e814aae840448ec5beaefa17ca966cfe287ff6993c26ea" Sep 30 14:23:18 crc kubenswrapper[4783]: E0930 14:23:18.243037 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6d035af531ad21280e814aae840448ec5beaefa17ca966cfe287ff6993c26ea\": container with ID starting with a6d035af531ad21280e814aae840448ec5beaefa17ca966cfe287ff6993c26ea not found: ID does not exist" containerID="a6d035af531ad21280e814aae840448ec5beaefa17ca966cfe287ff6993c26ea" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.243062 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6d035af531ad21280e814aae840448ec5beaefa17ca966cfe287ff6993c26ea"} err="failed to get container status \"a6d035af531ad21280e814aae840448ec5beaefa17ca966cfe287ff6993c26ea\": rpc error: code = NotFound desc = could not find container \"a6d035af531ad21280e814aae840448ec5beaefa17ca966cfe287ff6993c26ea\": container with ID starting with a6d035af531ad21280e814aae840448ec5beaefa17ca966cfe287ff6993c26ea not found: ID does not exist" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.243077 4783 scope.go:117] "RemoveContainer" 
containerID="e6d7a67ea469e45b8e66cde0b84afb41321efc0c0355fd13345dd9fdfcd01f06" Sep 30 14:23:18 crc kubenswrapper[4783]: E0930 14:23:18.243953 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6d7a67ea469e45b8e66cde0b84afb41321efc0c0355fd13345dd9fdfcd01f06\": container with ID starting with e6d7a67ea469e45b8e66cde0b84afb41321efc0c0355fd13345dd9fdfcd01f06 not found: ID does not exist" containerID="e6d7a67ea469e45b8e66cde0b84afb41321efc0c0355fd13345dd9fdfcd01f06" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.243995 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6d7a67ea469e45b8e66cde0b84afb41321efc0c0355fd13345dd9fdfcd01f06"} err="failed to get container status \"e6d7a67ea469e45b8e66cde0b84afb41321efc0c0355fd13345dd9fdfcd01f06\": rpc error: code = NotFound desc = could not find container \"e6d7a67ea469e45b8e66cde0b84afb41321efc0c0355fd13345dd9fdfcd01f06\": container with ID starting with e6d7a67ea469e45b8e66cde0b84afb41321efc0c0355fd13345dd9fdfcd01f06 not found: ID does not exist" Sep 30 14:23:18 crc kubenswrapper[4783]: I0930 14:23:18.851917 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45026355-351d-4e48-8890-e67333c41a4a" path="/var/lib/kubelet/pods/45026355-351d-4e48-8890-e67333c41a4a/volumes" Sep 30 14:23:44 crc kubenswrapper[4783]: I0930 14:23:44.932788 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lvcj7"] Sep 30 14:23:44 crc kubenswrapper[4783]: E0930 14:23:44.933658 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45026355-351d-4e48-8890-e67333c41a4a" containerName="registry-server" Sep 30 14:23:44 crc kubenswrapper[4783]: I0930 14:23:44.933675 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="45026355-351d-4e48-8890-e67333c41a4a" containerName="registry-server" Sep 30 14:23:44 crc kubenswrapper[4783]: E0930 14:23:44.933706 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45026355-351d-4e48-8890-e67333c41a4a" containerName="extract-utilities" Sep 30 14:23:44 crc kubenswrapper[4783]: I0930 14:23:44.933715 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="45026355-351d-4e48-8890-e67333c41a4a" containerName="extract-utilities" Sep 30 14:23:44 crc kubenswrapper[4783]: E0930 14:23:44.933726 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45026355-351d-4e48-8890-e67333c41a4a" containerName="extract-content" Sep 30 14:23:44 crc kubenswrapper[4783]: I0930 14:23:44.933734 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="45026355-351d-4e48-8890-e67333c41a4a" containerName="extract-content" Sep 30 14:23:44 crc kubenswrapper[4783]: I0930 14:23:44.933942 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="45026355-351d-4e48-8890-e67333c41a4a" containerName="registry-server" Sep 30 14:23:44 crc kubenswrapper[4783]: I0930 14:23:44.935248 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:44 crc kubenswrapper[4783]: I0930 14:23:44.949510 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvcj7"] Sep 30 14:23:44 crc kubenswrapper[4783]: I0930 14:23:44.971767 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d95759f0-fe8c-457d-a056-8d1e47908ba0-catalog-content\") pod \"redhat-marketplace-lvcj7\" (UID: \"d95759f0-fe8c-457d-a056-8d1e47908ba0\") " pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:44 crc kubenswrapper[4783]: I0930 14:23:44.971835 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktkn8\" (UniqueName: \"kubernetes.io/projected/d95759f0-fe8c-457d-a056-8d1e47908ba0-kube-api-access-ktkn8\") pod \"redhat-marketplace-lvcj7\" (UID: \"d95759f0-fe8c-457d-a056-8d1e47908ba0\") " pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:44 crc kubenswrapper[4783]: I0930 14:23:44.972006 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d95759f0-fe8c-457d-a056-8d1e47908ba0-utilities\") pod \"redhat-marketplace-lvcj7\" (UID: \"d95759f0-fe8c-457d-a056-8d1e47908ba0\") " pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:45 crc kubenswrapper[4783]: I0930 14:23:45.073734 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d95759f0-fe8c-457d-a056-8d1e47908ba0-utilities\") pod \"redhat-marketplace-lvcj7\" (UID: \"d95759f0-fe8c-457d-a056-8d1e47908ba0\") " pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:45 crc kubenswrapper[4783]: I0930 14:23:45.073924 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d95759f0-fe8c-457d-a056-8d1e47908ba0-catalog-content\") pod \"redhat-marketplace-lvcj7\" (UID: \"d95759f0-fe8c-457d-a056-8d1e47908ba0\") " pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:45 crc kubenswrapper[4783]: I0930 14:23:45.073987 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktkn8\" (UniqueName: \"kubernetes.io/projected/d95759f0-fe8c-457d-a056-8d1e47908ba0-kube-api-access-ktkn8\") pod \"redhat-marketplace-lvcj7\" (UID: \"d95759f0-fe8c-457d-a056-8d1e47908ba0\") " pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:45 crc kubenswrapper[4783]: I0930 14:23:45.074449 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d95759f0-fe8c-457d-a056-8d1e47908ba0-catalog-content\") pod \"redhat-marketplace-lvcj7\" (UID: \"d95759f0-fe8c-457d-a056-8d1e47908ba0\") " pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:45 crc kubenswrapper[4783]: I0930 14:23:45.074449 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d95759f0-fe8c-457d-a056-8d1e47908ba0-utilities\") pod \"redhat-marketplace-lvcj7\" (UID: \"d95759f0-fe8c-457d-a056-8d1e47908ba0\") " pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:45 crc kubenswrapper[4783]: I0930 14:23:45.096962 4783 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-ktkn8\" (UniqueName: \"kubernetes.io/projected/d95759f0-fe8c-457d-a056-8d1e47908ba0-kube-api-access-ktkn8\") pod \"redhat-marketplace-lvcj7\" (UID: \"d95759f0-fe8c-457d-a056-8d1e47908ba0\") " pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:45 crc kubenswrapper[4783]: I0930 14:23:45.254443 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:45 crc kubenswrapper[4783]: I0930 14:23:45.673417 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvcj7"] Sep 30 14:23:46 crc kubenswrapper[4783]: I0930 14:23:46.379192 4783 generic.go:334] "Generic (PLEG): container finished" podID="d95759f0-fe8c-457d-a056-8d1e47908ba0" containerID="4e9d926b1e76bfca8ea460a44aec1eecfd9b76026e4edde84119e390ea3a71e4" exitCode=0 Sep 30 14:23:46 crc kubenswrapper[4783]: I0930 14:23:46.379272 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvcj7" event={"ID":"d95759f0-fe8c-457d-a056-8d1e47908ba0","Type":"ContainerDied","Data":"4e9d926b1e76bfca8ea460a44aec1eecfd9b76026e4edde84119e390ea3a71e4"} Sep 30 14:23:46 crc kubenswrapper[4783]: I0930 14:23:46.379322 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvcj7" event={"ID":"d95759f0-fe8c-457d-a056-8d1e47908ba0","Type":"ContainerStarted","Data":"3a9ff9265f37885d361d1e41beeee5c613a9fcf0021e93896339ef2dfcf7d936"} Sep 30 14:23:47 crc kubenswrapper[4783]: I0930 14:23:47.388742 4783 generic.go:334] "Generic (PLEG): container finished" podID="d95759f0-fe8c-457d-a056-8d1e47908ba0" containerID="9dfd5e306431d6f4c3971b5db806311fcf47168c0a1bf9c19a7d7c985d2313fe" exitCode=0 Sep 30 14:23:47 crc kubenswrapper[4783]: I0930 14:23:47.388920 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvcj7" event={"ID":"d95759f0-fe8c-457d-a056-8d1e47908ba0","Type":"ContainerDied","Data":"9dfd5e306431d6f4c3971b5db806311fcf47168c0a1bf9c19a7d7c985d2313fe"} Sep 30 14:23:48 crc kubenswrapper[4783]: I0930 14:23:48.398002 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvcj7" event={"ID":"d95759f0-fe8c-457d-a056-8d1e47908ba0","Type":"ContainerStarted","Data":"e5a50b4822b394805f5791d0d61c45d955e27aaf1b426bf76e52454d95953835"} Sep 30 14:23:55 crc kubenswrapper[4783]: I0930 14:23:55.255305 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:55 crc kubenswrapper[4783]: I0930 14:23:55.255906 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:55 crc kubenswrapper[4783]: I0930 14:23:55.297600 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:55 crc kubenswrapper[4783]: I0930 14:23:55.321839 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lvcj7" podStartSLOduration=9.952662663 podStartE2EDuration="11.321813498s" podCreationTimestamp="2025-09-30 14:23:44 +0000 UTC" firstStartedPulling="2025-09-30 14:23:46.381503258 +0000 UTC m=+2926.312969565" lastFinishedPulling="2025-09-30 14:23:47.750654093 +0000 UTC m=+2927.682120400" observedRunningTime="2025-09-30 14:23:48.417880782 +0000 UTC 
m=+2928.349347089" watchObservedRunningTime="2025-09-30 14:23:55.321813498 +0000 UTC m=+2935.253279805" Sep 30 14:23:55 crc kubenswrapper[4783]: I0930 14:23:55.495298 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:55 crc kubenswrapper[4783]: I0930 14:23:55.547101 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvcj7"] Sep 30 14:23:57 crc kubenswrapper[4783]: I0930 14:23:57.467727 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lvcj7" podUID="d95759f0-fe8c-457d-a056-8d1e47908ba0" containerName="registry-server" containerID="cri-o://e5a50b4822b394805f5791d0d61c45d955e27aaf1b426bf76e52454d95953835" gracePeriod=2 Sep 30 14:23:57 crc kubenswrapper[4783]: I0930 14:23:57.921668 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.000576 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d95759f0-fe8c-457d-a056-8d1e47908ba0-catalog-content\") pod \"d95759f0-fe8c-457d-a056-8d1e47908ba0\" (UID: \"d95759f0-fe8c-457d-a056-8d1e47908ba0\") " Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.000698 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktkn8\" (UniqueName: \"kubernetes.io/projected/d95759f0-fe8c-457d-a056-8d1e47908ba0-kube-api-access-ktkn8\") pod \"d95759f0-fe8c-457d-a056-8d1e47908ba0\" (UID: \"d95759f0-fe8c-457d-a056-8d1e47908ba0\") " Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.000799 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d95759f0-fe8c-457d-a056-8d1e47908ba0-utilities\") pod \"d95759f0-fe8c-457d-a056-8d1e47908ba0\" (UID: \"d95759f0-fe8c-457d-a056-8d1e47908ba0\") " Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.002828 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d95759f0-fe8c-457d-a056-8d1e47908ba0-utilities" (OuterVolumeSpecName: "utilities") pod "d95759f0-fe8c-457d-a056-8d1e47908ba0" (UID: "d95759f0-fe8c-457d-a056-8d1e47908ba0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.006659 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d95759f0-fe8c-457d-a056-8d1e47908ba0-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.008283 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d95759f0-fe8c-457d-a056-8d1e47908ba0-kube-api-access-ktkn8" (OuterVolumeSpecName: "kube-api-access-ktkn8") pod "d95759f0-fe8c-457d-a056-8d1e47908ba0" (UID: "d95759f0-fe8c-457d-a056-8d1e47908ba0"). InnerVolumeSpecName "kube-api-access-ktkn8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.015392 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d95759f0-fe8c-457d-a056-8d1e47908ba0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d95759f0-fe8c-457d-a056-8d1e47908ba0" (UID: "d95759f0-fe8c-457d-a056-8d1e47908ba0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.108129 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d95759f0-fe8c-457d-a056-8d1e47908ba0-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.108588 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktkn8\" (UniqueName: \"kubernetes.io/projected/d95759f0-fe8c-457d-a056-8d1e47908ba0-kube-api-access-ktkn8\") on node \"crc\" DevicePath \"\"" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.477104 4783 generic.go:334] "Generic (PLEG): container finished" podID="d95759f0-fe8c-457d-a056-8d1e47908ba0" containerID="e5a50b4822b394805f5791d0d61c45d955e27aaf1b426bf76e52454d95953835" exitCode=0 Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.477160 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvcj7" event={"ID":"d95759f0-fe8c-457d-a056-8d1e47908ba0","Type":"ContainerDied","Data":"e5a50b4822b394805f5791d0d61c45d955e27aaf1b426bf76e52454d95953835"} Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.477195 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvcj7" event={"ID":"d95759f0-fe8c-457d-a056-8d1e47908ba0","Type":"ContainerDied","Data":"3a9ff9265f37885d361d1e41beeee5c613a9fcf0021e93896339ef2dfcf7d936"} Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.477251 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lvcj7" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.477268 4783 scope.go:117] "RemoveContainer" containerID="e5a50b4822b394805f5791d0d61c45d955e27aaf1b426bf76e52454d95953835" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.499246 4783 scope.go:117] "RemoveContainer" containerID="9dfd5e306431d6f4c3971b5db806311fcf47168c0a1bf9c19a7d7c985d2313fe" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.523514 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvcj7"] Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.535754 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvcj7"] Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.538509 4783 scope.go:117] "RemoveContainer" containerID="4e9d926b1e76bfca8ea460a44aec1eecfd9b76026e4edde84119e390ea3a71e4" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.559630 4783 scope.go:117] "RemoveContainer" containerID="e5a50b4822b394805f5791d0d61c45d955e27aaf1b426bf76e52454d95953835" Sep 30 14:23:58 crc kubenswrapper[4783]: E0930 14:23:58.560246 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5a50b4822b394805f5791d0d61c45d955e27aaf1b426bf76e52454d95953835\": container with ID starting with e5a50b4822b394805f5791d0d61c45d955e27aaf1b426bf76e52454d95953835 not found: ID does not exist" containerID="e5a50b4822b394805f5791d0d61c45d955e27aaf1b426bf76e52454d95953835" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.560301 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5a50b4822b394805f5791d0d61c45d955e27aaf1b426bf76e52454d95953835"} err="failed to get container status \"e5a50b4822b394805f5791d0d61c45d955e27aaf1b426bf76e52454d95953835\": rpc error: code = NotFound desc = could not find container \"e5a50b4822b394805f5791d0d61c45d955e27aaf1b426bf76e52454d95953835\": container with ID starting with e5a50b4822b394805f5791d0d61c45d955e27aaf1b426bf76e52454d95953835 not found: ID does not exist" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.560339 4783 scope.go:117] "RemoveContainer" containerID="9dfd5e306431d6f4c3971b5db806311fcf47168c0a1bf9c19a7d7c985d2313fe" Sep 30 14:23:58 crc kubenswrapper[4783]: E0930 14:23:58.560745 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dfd5e306431d6f4c3971b5db806311fcf47168c0a1bf9c19a7d7c985d2313fe\": container with ID starting with 9dfd5e306431d6f4c3971b5db806311fcf47168c0a1bf9c19a7d7c985d2313fe not found: ID does not exist" containerID="9dfd5e306431d6f4c3971b5db806311fcf47168c0a1bf9c19a7d7c985d2313fe" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.560774 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dfd5e306431d6f4c3971b5db806311fcf47168c0a1bf9c19a7d7c985d2313fe"} err="failed to get container status \"9dfd5e306431d6f4c3971b5db806311fcf47168c0a1bf9c19a7d7c985d2313fe\": rpc error: code = NotFound desc = could not find container \"9dfd5e306431d6f4c3971b5db806311fcf47168c0a1bf9c19a7d7c985d2313fe\": container with ID starting with 9dfd5e306431d6f4c3971b5db806311fcf47168c0a1bf9c19a7d7c985d2313fe not found: ID does not exist" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.560796 4783 scope.go:117] "RemoveContainer" 
containerID="4e9d926b1e76bfca8ea460a44aec1eecfd9b76026e4edde84119e390ea3a71e4" Sep 30 14:23:58 crc kubenswrapper[4783]: E0930 14:23:58.561139 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e9d926b1e76bfca8ea460a44aec1eecfd9b76026e4edde84119e390ea3a71e4\": container with ID starting with 4e9d926b1e76bfca8ea460a44aec1eecfd9b76026e4edde84119e390ea3a71e4 not found: ID does not exist" containerID="4e9d926b1e76bfca8ea460a44aec1eecfd9b76026e4edde84119e390ea3a71e4" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.561167 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e9d926b1e76bfca8ea460a44aec1eecfd9b76026e4edde84119e390ea3a71e4"} err="failed to get container status \"4e9d926b1e76bfca8ea460a44aec1eecfd9b76026e4edde84119e390ea3a71e4\": rpc error: code = NotFound desc = could not find container \"4e9d926b1e76bfca8ea460a44aec1eecfd9b76026e4edde84119e390ea3a71e4\": container with ID starting with 4e9d926b1e76bfca8ea460a44aec1eecfd9b76026e4edde84119e390ea3a71e4 not found: ID does not exist" Sep 30 14:23:58 crc kubenswrapper[4783]: I0930 14:23:58.853327 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d95759f0-fe8c-457d-a056-8d1e47908ba0" path="/var/lib/kubelet/pods/d95759f0-fe8c-457d-a056-8d1e47908ba0/volumes" Sep 30 14:24:37 crc kubenswrapper[4783]: I0930 14:24:37.674321 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:24:37 crc kubenswrapper[4783]: I0930 14:24:37.674843 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:25:07 crc kubenswrapper[4783]: I0930 14:25:07.674515 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:25:07 crc kubenswrapper[4783]: I0930 14:25:07.675204 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:25:37 crc kubenswrapper[4783]: I0930 14:25:37.673888 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:25:37 crc kubenswrapper[4783]: I0930 14:25:37.674616 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:25:37 crc kubenswrapper[4783]: I0930 14:25:37.674680 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 14:25:37 crc kubenswrapper[4783]: I0930 14:25:37.675579 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 14:25:37 crc kubenswrapper[4783]: I0930 14:25:37.675678 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" gracePeriod=600 Sep 30 14:25:37 crc kubenswrapper[4783]: E0930 14:25:37.803085 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:25:38 crc kubenswrapper[4783]: I0930 14:25:38.274986 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" exitCode=0 Sep 30 14:25:38 crc kubenswrapper[4783]: I0930 14:25:38.275023 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7"} Sep 30 14:25:38 crc kubenswrapper[4783]: I0930 14:25:38.275054 4783 scope.go:117] "RemoveContainer" containerID="ce7050e6973576d4e40f4e8bad410802b76630552004372d2c16c0e7cf2d12ab" Sep 30 14:25:38 crc kubenswrapper[4783]: I0930 14:25:38.275732 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:25:38 crc kubenswrapper[4783]: E0930 14:25:38.276183 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:25:53 crc kubenswrapper[4783]: I0930 14:25:53.843064 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:25:53 crc kubenswrapper[4783]: E0930 14:25:53.844254 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:26:06 crc kubenswrapper[4783]: I0930 14:26:06.843499 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:26:06 crc kubenswrapper[4783]: E0930 14:26:06.844584 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:26:21 crc kubenswrapper[4783]: I0930 14:26:21.843134 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:26:21 crc kubenswrapper[4783]: E0930 14:26:21.843797 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:26:35 crc kubenswrapper[4783]: I0930 14:26:35.843659 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:26:35 crc kubenswrapper[4783]: E0930 14:26:35.844638 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:26:49 crc kubenswrapper[4783]: I0930 14:26:49.843549 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:26:49 crc kubenswrapper[4783]: E0930 14:26:49.844402 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:27:02 crc kubenswrapper[4783]: I0930 14:27:02.845859 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:27:02 crc kubenswrapper[4783]: E0930 14:27:02.847305 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" 
podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:27:17 crc kubenswrapper[4783]: I0930 14:27:17.843330 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:27:17 crc kubenswrapper[4783]: E0930 14:27:17.844066 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:27:30 crc kubenswrapper[4783]: I0930 14:27:30.849290 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:27:30 crc kubenswrapper[4783]: E0930 14:27:30.850082 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:27:45 crc kubenswrapper[4783]: I0930 14:27:45.843048 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:27:45 crc kubenswrapper[4783]: E0930 14:27:45.843794 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:27:59 crc kubenswrapper[4783]: I0930 14:27:59.843646 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:27:59 crc kubenswrapper[4783]: E0930 14:27:59.844670 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:28:10 crc kubenswrapper[4783]: I0930 14:28:10.858102 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:28:10 crc kubenswrapper[4783]: E0930 14:28:10.859895 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:28:24 crc kubenswrapper[4783]: I0930 14:28:24.842627 4783 scope.go:117] "RemoveContainer" 
containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:28:24 crc kubenswrapper[4783]: E0930 14:28:24.843396 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:28:37 crc kubenswrapper[4783]: I0930 14:28:37.843263 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:28:37 crc kubenswrapper[4783]: E0930 14:28:37.843834 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:28:49 crc kubenswrapper[4783]: I0930 14:28:49.843293 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:28:49 crc kubenswrapper[4783]: E0930 14:28:49.844162 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:29:02 crc kubenswrapper[4783]: I0930 14:29:02.847310 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:29:02 crc kubenswrapper[4783]: E0930 14:29:02.849493 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:29:17 crc kubenswrapper[4783]: I0930 14:29:17.842984 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:29:17 crc kubenswrapper[4783]: E0930 14:29:17.843819 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:29:30 crc kubenswrapper[4783]: I0930 14:29:30.846870 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:29:30 crc kubenswrapper[4783]: E0930 14:29:30.847749 4783 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:29:43 crc kubenswrapper[4783]: I0930 14:29:43.844577 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:29:43 crc kubenswrapper[4783]: E0930 14:29:43.845190 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:29:54 crc kubenswrapper[4783]: I0930 14:29:54.843023 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:29:54 crc kubenswrapper[4783]: E0930 14:29:54.844085 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.176512 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p"] Sep 30 14:30:00 crc kubenswrapper[4783]: E0930 14:30:00.177299 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d95759f0-fe8c-457d-a056-8d1e47908ba0" containerName="extract-utilities" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.177316 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d95759f0-fe8c-457d-a056-8d1e47908ba0" containerName="extract-utilities" Sep 30 14:30:00 crc kubenswrapper[4783]: E0930 14:30:00.177351 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d95759f0-fe8c-457d-a056-8d1e47908ba0" containerName="extract-content" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.177360 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d95759f0-fe8c-457d-a056-8d1e47908ba0" containerName="extract-content" Sep 30 14:30:00 crc kubenswrapper[4783]: E0930 14:30:00.177385 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d95759f0-fe8c-457d-a056-8d1e47908ba0" containerName="registry-server" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.177393 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d95759f0-fe8c-457d-a056-8d1e47908ba0" containerName="registry-server" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.177575 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d95759f0-fe8c-457d-a056-8d1e47908ba0" containerName="registry-server" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.178266 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.181935 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.184805 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.214545 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p"] Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.241173 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c4f99f8-995d-478e-bee7-0fb19f14902c-secret-volume\") pod \"collect-profiles-29320710-kkv2p\" (UID: \"8c4f99f8-995d-478e-bee7-0fb19f14902c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.241513 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c4f99f8-995d-478e-bee7-0fb19f14902c-config-volume\") pod \"collect-profiles-29320710-kkv2p\" (UID: \"8c4f99f8-995d-478e-bee7-0fb19f14902c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.241604 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxjdm\" (UniqueName: \"kubernetes.io/projected/8c4f99f8-995d-478e-bee7-0fb19f14902c-kube-api-access-zxjdm\") pod \"collect-profiles-29320710-kkv2p\" (UID: \"8c4f99f8-995d-478e-bee7-0fb19f14902c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.342955 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c4f99f8-995d-478e-bee7-0fb19f14902c-secret-volume\") pod \"collect-profiles-29320710-kkv2p\" (UID: \"8c4f99f8-995d-478e-bee7-0fb19f14902c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.343022 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c4f99f8-995d-478e-bee7-0fb19f14902c-config-volume\") pod \"collect-profiles-29320710-kkv2p\" (UID: \"8c4f99f8-995d-478e-bee7-0fb19f14902c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.343048 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxjdm\" (UniqueName: \"kubernetes.io/projected/8c4f99f8-995d-478e-bee7-0fb19f14902c-kube-api-access-zxjdm\") pod \"collect-profiles-29320710-kkv2p\" (UID: \"8c4f99f8-995d-478e-bee7-0fb19f14902c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.343966 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c4f99f8-995d-478e-bee7-0fb19f14902c-config-volume\") pod 
\"collect-profiles-29320710-kkv2p\" (UID: \"8c4f99f8-995d-478e-bee7-0fb19f14902c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.349316 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c4f99f8-995d-478e-bee7-0fb19f14902c-secret-volume\") pod \"collect-profiles-29320710-kkv2p\" (UID: \"8c4f99f8-995d-478e-bee7-0fb19f14902c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.362118 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxjdm\" (UniqueName: \"kubernetes.io/projected/8c4f99f8-995d-478e-bee7-0fb19f14902c-kube-api-access-zxjdm\") pod \"collect-profiles-29320710-kkv2p\" (UID: \"8c4f99f8-995d-478e-bee7-0fb19f14902c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.512212 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" Sep 30 14:30:00 crc kubenswrapper[4783]: I0930 14:30:00.934500 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p"] Sep 30 14:30:01 crc kubenswrapper[4783]: I0930 14:30:01.377714 4783 generic.go:334] "Generic (PLEG): container finished" podID="8c4f99f8-995d-478e-bee7-0fb19f14902c" containerID="2d22f0e1bb9a63dd1d804cb417605f85c551fe9b819969b9efcab6955ae6595f" exitCode=0 Sep 30 14:30:01 crc kubenswrapper[4783]: I0930 14:30:01.377847 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" event={"ID":"8c4f99f8-995d-478e-bee7-0fb19f14902c","Type":"ContainerDied","Data":"2d22f0e1bb9a63dd1d804cb417605f85c551fe9b819969b9efcab6955ae6595f"} Sep 30 14:30:01 crc kubenswrapper[4783]: I0930 14:30:01.378160 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" event={"ID":"8c4f99f8-995d-478e-bee7-0fb19f14902c","Type":"ContainerStarted","Data":"d18f3e43e6659e7fe0e8ad75d0f9697f6b7ff1f6695cc4d9b3c3bf205c80e320"} Sep 30 14:30:02 crc kubenswrapper[4783]: I0930 14:30:02.632097 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" Sep 30 14:30:02 crc kubenswrapper[4783]: I0930 14:30:02.781294 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c4f99f8-995d-478e-bee7-0fb19f14902c-config-volume\") pod \"8c4f99f8-995d-478e-bee7-0fb19f14902c\" (UID: \"8c4f99f8-995d-478e-bee7-0fb19f14902c\") " Sep 30 14:30:02 crc kubenswrapper[4783]: I0930 14:30:02.781482 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c4f99f8-995d-478e-bee7-0fb19f14902c-secret-volume\") pod \"8c4f99f8-995d-478e-bee7-0fb19f14902c\" (UID: \"8c4f99f8-995d-478e-bee7-0fb19f14902c\") " Sep 30 14:30:02 crc kubenswrapper[4783]: I0930 14:30:02.781523 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxjdm\" (UniqueName: \"kubernetes.io/projected/8c4f99f8-995d-478e-bee7-0fb19f14902c-kube-api-access-zxjdm\") pod \"8c4f99f8-995d-478e-bee7-0fb19f14902c\" (UID: \"8c4f99f8-995d-478e-bee7-0fb19f14902c\") " Sep 30 14:30:02 crc kubenswrapper[4783]: I0930 14:30:02.782183 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c4f99f8-995d-478e-bee7-0fb19f14902c-config-volume" (OuterVolumeSpecName: "config-volume") pod "8c4f99f8-995d-478e-bee7-0fb19f14902c" (UID: "8c4f99f8-995d-478e-bee7-0fb19f14902c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:30:02 crc kubenswrapper[4783]: I0930 14:30:02.786967 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c4f99f8-995d-478e-bee7-0fb19f14902c-kube-api-access-zxjdm" (OuterVolumeSpecName: "kube-api-access-zxjdm") pod "8c4f99f8-995d-478e-bee7-0fb19f14902c" (UID: "8c4f99f8-995d-478e-bee7-0fb19f14902c"). InnerVolumeSpecName "kube-api-access-zxjdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:30:02 crc kubenswrapper[4783]: I0930 14:30:02.787283 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c4f99f8-995d-478e-bee7-0fb19f14902c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8c4f99f8-995d-478e-bee7-0fb19f14902c" (UID: "8c4f99f8-995d-478e-bee7-0fb19f14902c"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 14:30:02 crc kubenswrapper[4783]: I0930 14:30:02.882834 4783 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c4f99f8-995d-478e-bee7-0fb19f14902c-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 14:30:02 crc kubenswrapper[4783]: I0930 14:30:02.882863 4783 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c4f99f8-995d-478e-bee7-0fb19f14902c-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 14:30:02 crc kubenswrapper[4783]: I0930 14:30:02.882874 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxjdm\" (UniqueName: \"kubernetes.io/projected/8c4f99f8-995d-478e-bee7-0fb19f14902c-kube-api-access-zxjdm\") on node \"crc\" DevicePath \"\"" Sep 30 14:30:03 crc kubenswrapper[4783]: I0930 14:30:03.393206 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" event={"ID":"8c4f99f8-995d-478e-bee7-0fb19f14902c","Type":"ContainerDied","Data":"d18f3e43e6659e7fe0e8ad75d0f9697f6b7ff1f6695cc4d9b3c3bf205c80e320"} Sep 30 14:30:03 crc kubenswrapper[4783]: I0930 14:30:03.393259 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d18f3e43e6659e7fe0e8ad75d0f9697f6b7ff1f6695cc4d9b3c3bf205c80e320" Sep 30 14:30:03 crc kubenswrapper[4783]: I0930 14:30:03.393268 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p" Sep 30 14:30:03 crc kubenswrapper[4783]: I0930 14:30:03.704983 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8"] Sep 30 14:30:03 crc kubenswrapper[4783]: I0930 14:30:03.710010 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320665-9bzw8"] Sep 30 14:30:04 crc kubenswrapper[4783]: I0930 14:30:04.854282 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90c7cdb2-f935-4694-ad68-07ea73c25b70" path="/var/lib/kubelet/pods/90c7cdb2-f935-4694-ad68-07ea73c25b70/volumes" Sep 30 14:30:09 crc kubenswrapper[4783]: I0930 14:30:09.843043 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:30:09 crc kubenswrapper[4783]: E0930 14:30:09.844188 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:30:15 crc kubenswrapper[4783]: I0930 14:30:15.661591 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-f4js7"] Sep 30 14:30:15 crc kubenswrapper[4783]: E0930 14:30:15.662241 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c4f99f8-995d-478e-bee7-0fb19f14902c" containerName="collect-profiles" Sep 30 14:30:15 crc kubenswrapper[4783]: I0930 14:30:15.662253 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c4f99f8-995d-478e-bee7-0fb19f14902c" containerName="collect-profiles" Sep 30 14:30:15 crc 
kubenswrapper[4783]: I0930 14:30:15.662398 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c4f99f8-995d-478e-bee7-0fb19f14902c" containerName="collect-profiles" Sep 30 14:30:15 crc kubenswrapper[4783]: I0930 14:30:15.663388 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:15 crc kubenswrapper[4783]: I0930 14:30:15.682058 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f4js7"] Sep 30 14:30:15 crc kubenswrapper[4783]: I0930 14:30:15.762898 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-utilities\") pod \"redhat-operators-f4js7\" (UID: \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\") " pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:15 crc kubenswrapper[4783]: I0930 14:30:15.762958 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5lt8\" (UniqueName: \"kubernetes.io/projected/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-kube-api-access-t5lt8\") pod \"redhat-operators-f4js7\" (UID: \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\") " pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:15 crc kubenswrapper[4783]: I0930 14:30:15.763015 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-catalog-content\") pod \"redhat-operators-f4js7\" (UID: \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\") " pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:15 crc kubenswrapper[4783]: I0930 14:30:15.865059 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-catalog-content\") pod \"redhat-operators-f4js7\" (UID: \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\") " pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:15 crc kubenswrapper[4783]: I0930 14:30:15.865455 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-utilities\") pod \"redhat-operators-f4js7\" (UID: \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\") " pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:15 crc kubenswrapper[4783]: I0930 14:30:15.865577 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5lt8\" (UniqueName: \"kubernetes.io/projected/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-kube-api-access-t5lt8\") pod \"redhat-operators-f4js7\" (UID: \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\") " pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:15 crc kubenswrapper[4783]: I0930 14:30:15.865874 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-catalog-content\") pod \"redhat-operators-f4js7\" (UID: \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\") " pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:15 crc kubenswrapper[4783]: I0930 14:30:15.866186 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-utilities\") pod \"redhat-operators-f4js7\" (UID: \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\") " pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:15 crc kubenswrapper[4783]: I0930 14:30:15.884989 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5lt8\" (UniqueName: \"kubernetes.io/projected/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-kube-api-access-t5lt8\") pod \"redhat-operators-f4js7\" (UID: \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\") " pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:15 crc kubenswrapper[4783]: I0930 14:30:15.986843 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:16 crc kubenswrapper[4783]: I0930 14:30:16.454134 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f4js7"] Sep 30 14:30:16 crc kubenswrapper[4783]: I0930 14:30:16.491600 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f4js7" event={"ID":"fe3b41a3-f3b9-474b-af5f-b00a39c83df4","Type":"ContainerStarted","Data":"3af50c60b6f5f2a1552f5895ddfddad8f4bf5dd72eb073a27d7e8715f9a1fdaf"} Sep 30 14:30:17 crc kubenswrapper[4783]: I0930 14:30:17.500406 4783 generic.go:334] "Generic (PLEG): container finished" podID="fe3b41a3-f3b9-474b-af5f-b00a39c83df4" containerID="9d9e52679b0274ec145761cdb31168dc300785e58fe6f6d61cd82e87e5a1396b" exitCode=0 Sep 30 14:30:17 crc kubenswrapper[4783]: I0930 14:30:17.500518 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f4js7" event={"ID":"fe3b41a3-f3b9-474b-af5f-b00a39c83df4","Type":"ContainerDied","Data":"9d9e52679b0274ec145761cdb31168dc300785e58fe6f6d61cd82e87e5a1396b"} Sep 30 14:30:17 crc kubenswrapper[4783]: I0930 14:30:17.503183 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 14:30:17 crc kubenswrapper[4783]: I0930 14:30:17.959899 4783 scope.go:117] "RemoveContainer" containerID="ce9821bdc097f0dfbbc3c212ef02aef00edf94fe6b99760a1b12dc407c0633f9" Sep 30 14:30:18 crc kubenswrapper[4783]: I0930 14:30:18.512612 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f4js7" event={"ID":"fe3b41a3-f3b9-474b-af5f-b00a39c83df4","Type":"ContainerStarted","Data":"dda328681812ddf1cd66f04dee96cb44d1f16180950502a5f9cb827396add292"} Sep 30 14:30:19 crc kubenswrapper[4783]: I0930 14:30:19.527759 4783 generic.go:334] "Generic (PLEG): container finished" podID="fe3b41a3-f3b9-474b-af5f-b00a39c83df4" containerID="dda328681812ddf1cd66f04dee96cb44d1f16180950502a5f9cb827396add292" exitCode=0 Sep 30 14:30:19 crc kubenswrapper[4783]: I0930 14:30:19.527812 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f4js7" event={"ID":"fe3b41a3-f3b9-474b-af5f-b00a39c83df4","Type":"ContainerDied","Data":"dda328681812ddf1cd66f04dee96cb44d1f16180950502a5f9cb827396add292"} Sep 30 14:30:20 crc kubenswrapper[4783]: I0930 14:30:20.535977 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f4js7" event={"ID":"fe3b41a3-f3b9-474b-af5f-b00a39c83df4","Type":"ContainerStarted","Data":"5bbd2c13941fd1bb6669e21b6bf4064827813164c90df9726b5affa1efd43802"} Sep 30 14:30:20 crc kubenswrapper[4783]: I0930 14:30:20.557005 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-f4js7" podStartSLOduration=3.104765262 podStartE2EDuration="5.556987735s" podCreationTimestamp="2025-09-30 14:30:15 +0000 UTC" firstStartedPulling="2025-09-30 14:30:17.502838575 +0000 UTC m=+3317.434304892" lastFinishedPulling="2025-09-30 14:30:19.955061058 +0000 UTC m=+3319.886527365" observedRunningTime="2025-09-30 14:30:20.555445075 +0000 UTC m=+3320.486911392" watchObservedRunningTime="2025-09-30 14:30:20.556987735 +0000 UTC m=+3320.488454052" Sep 30 14:30:23 crc kubenswrapper[4783]: I0930 14:30:23.843710 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:30:23 crc kubenswrapper[4783]: E0930 14:30:23.844412 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:30:25 crc kubenswrapper[4783]: I0930 14:30:25.987163 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:25 crc kubenswrapper[4783]: I0930 14:30:25.987607 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:26 crc kubenswrapper[4783]: I0930 14:30:26.030560 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:26 crc kubenswrapper[4783]: I0930 14:30:26.642883 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:26 crc kubenswrapper[4783]: I0930 14:30:26.691573 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f4js7"] Sep 30 14:30:28 crc kubenswrapper[4783]: I0930 14:30:28.606736 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-f4js7" podUID="fe3b41a3-f3b9-474b-af5f-b00a39c83df4" containerName="registry-server" containerID="cri-o://5bbd2c13941fd1bb6669e21b6bf4064827813164c90df9726b5affa1efd43802" gracePeriod=2 Sep 30 14:30:28 crc kubenswrapper[4783]: I0930 14:30:28.992516 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.054266 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-catalog-content\") pod \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\" (UID: \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\") " Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.054337 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-utilities\") pod \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\" (UID: \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\") " Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.054465 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5lt8\" (UniqueName: \"kubernetes.io/projected/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-kube-api-access-t5lt8\") pod \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\" (UID: \"fe3b41a3-f3b9-474b-af5f-b00a39c83df4\") " Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.055533 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-utilities" (OuterVolumeSpecName: "utilities") pod "fe3b41a3-f3b9-474b-af5f-b00a39c83df4" (UID: "fe3b41a3-f3b9-474b-af5f-b00a39c83df4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.061550 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-kube-api-access-t5lt8" (OuterVolumeSpecName: "kube-api-access-t5lt8") pod "fe3b41a3-f3b9-474b-af5f-b00a39c83df4" (UID: "fe3b41a3-f3b9-474b-af5f-b00a39c83df4"). InnerVolumeSpecName "kube-api-access-t5lt8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.156245 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5lt8\" (UniqueName: \"kubernetes.io/projected/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-kube-api-access-t5lt8\") on node \"crc\" DevicePath \"\"" Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.156609 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.620407 4783 generic.go:334] "Generic (PLEG): container finished" podID="fe3b41a3-f3b9-474b-af5f-b00a39c83df4" containerID="5bbd2c13941fd1bb6669e21b6bf4064827813164c90df9726b5affa1efd43802" exitCode=0 Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.620464 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f4js7" event={"ID":"fe3b41a3-f3b9-474b-af5f-b00a39c83df4","Type":"ContainerDied","Data":"5bbd2c13941fd1bb6669e21b6bf4064827813164c90df9726b5affa1efd43802"} Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.620506 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f4js7" event={"ID":"fe3b41a3-f3b9-474b-af5f-b00a39c83df4","Type":"ContainerDied","Data":"3af50c60b6f5f2a1552f5895ddfddad8f4bf5dd72eb073a27d7e8715f9a1fdaf"} Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.620534 4783 scope.go:117] "RemoveContainer" containerID="5bbd2c13941fd1bb6669e21b6bf4064827813164c90df9726b5affa1efd43802" Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.620534 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f4js7" Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.642337 4783 scope.go:117] "RemoveContainer" containerID="dda328681812ddf1cd66f04dee96cb44d1f16180950502a5f9cb827396add292" Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.661607 4783 scope.go:117] "RemoveContainer" containerID="9d9e52679b0274ec145761cdb31168dc300785e58fe6f6d61cd82e87e5a1396b" Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.681443 4783 scope.go:117] "RemoveContainer" containerID="5bbd2c13941fd1bb6669e21b6bf4064827813164c90df9726b5affa1efd43802" Sep 30 14:30:29 crc kubenswrapper[4783]: E0930 14:30:29.681889 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bbd2c13941fd1bb6669e21b6bf4064827813164c90df9726b5affa1efd43802\": container with ID starting with 5bbd2c13941fd1bb6669e21b6bf4064827813164c90df9726b5affa1efd43802 not found: ID does not exist" containerID="5bbd2c13941fd1bb6669e21b6bf4064827813164c90df9726b5affa1efd43802" Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.681944 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bbd2c13941fd1bb6669e21b6bf4064827813164c90df9726b5affa1efd43802"} err="failed to get container status \"5bbd2c13941fd1bb6669e21b6bf4064827813164c90df9726b5affa1efd43802\": rpc error: code = NotFound desc = could not find container \"5bbd2c13941fd1bb6669e21b6bf4064827813164c90df9726b5affa1efd43802\": container with ID starting with 5bbd2c13941fd1bb6669e21b6bf4064827813164c90df9726b5affa1efd43802 not found: ID does not exist" Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.681979 4783 scope.go:117] "RemoveContainer" containerID="dda328681812ddf1cd66f04dee96cb44d1f16180950502a5f9cb827396add292" Sep 30 14:30:29 crc kubenswrapper[4783]: E0930 14:30:29.683069 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dda328681812ddf1cd66f04dee96cb44d1f16180950502a5f9cb827396add292\": container with ID starting with dda328681812ddf1cd66f04dee96cb44d1f16180950502a5f9cb827396add292 not found: ID does not exist" containerID="dda328681812ddf1cd66f04dee96cb44d1f16180950502a5f9cb827396add292" Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.683096 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dda328681812ddf1cd66f04dee96cb44d1f16180950502a5f9cb827396add292"} err="failed to get container status \"dda328681812ddf1cd66f04dee96cb44d1f16180950502a5f9cb827396add292\": rpc error: code = NotFound desc = could not find container \"dda328681812ddf1cd66f04dee96cb44d1f16180950502a5f9cb827396add292\": container with ID starting with dda328681812ddf1cd66f04dee96cb44d1f16180950502a5f9cb827396add292 not found: ID does not exist" Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.683114 4783 scope.go:117] "RemoveContainer" containerID="9d9e52679b0274ec145761cdb31168dc300785e58fe6f6d61cd82e87e5a1396b" Sep 30 14:30:29 crc kubenswrapper[4783]: E0930 14:30:29.683342 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d9e52679b0274ec145761cdb31168dc300785e58fe6f6d61cd82e87e5a1396b\": container with ID starting with 9d9e52679b0274ec145761cdb31168dc300785e58fe6f6d61cd82e87e5a1396b not found: ID does not exist" containerID="9d9e52679b0274ec145761cdb31168dc300785e58fe6f6d61cd82e87e5a1396b" 
Sep 30 14:30:29 crc kubenswrapper[4783]: I0930 14:30:29.683364 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d9e52679b0274ec145761cdb31168dc300785e58fe6f6d61cd82e87e5a1396b"} err="failed to get container status \"9d9e52679b0274ec145761cdb31168dc300785e58fe6f6d61cd82e87e5a1396b\": rpc error: code = NotFound desc = could not find container \"9d9e52679b0274ec145761cdb31168dc300785e58fe6f6d61cd82e87e5a1396b\": container with ID starting with 9d9e52679b0274ec145761cdb31168dc300785e58fe6f6d61cd82e87e5a1396b not found: ID does not exist" Sep 30 14:30:30 crc kubenswrapper[4783]: I0930 14:30:30.186141 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fe3b41a3-f3b9-474b-af5f-b00a39c83df4" (UID: "fe3b41a3-f3b9-474b-af5f-b00a39c83df4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:30:30 crc kubenswrapper[4783]: I0930 14:30:30.252309 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f4js7"] Sep 30 14:30:30 crc kubenswrapper[4783]: I0930 14:30:30.257967 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-f4js7"] Sep 30 14:30:30 crc kubenswrapper[4783]: I0930 14:30:30.273214 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe3b41a3-f3b9-474b-af5f-b00a39c83df4-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:30:30 crc kubenswrapper[4783]: I0930 14:30:30.851897 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe3b41a3-f3b9-474b-af5f-b00a39c83df4" path="/var/lib/kubelet/pods/fe3b41a3-f3b9-474b-af5f-b00a39c83df4/volumes" Sep 30 14:30:32 crc kubenswrapper[4783]: I0930 14:30:32.886544 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mk2jv"] Sep 30 14:30:32 crc kubenswrapper[4783]: E0930 14:30:32.887060 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe3b41a3-f3b9-474b-af5f-b00a39c83df4" containerName="extract-utilities" Sep 30 14:30:32 crc kubenswrapper[4783]: I0930 14:30:32.887079 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe3b41a3-f3b9-474b-af5f-b00a39c83df4" containerName="extract-utilities" Sep 30 14:30:32 crc kubenswrapper[4783]: E0930 14:30:32.887104 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe3b41a3-f3b9-474b-af5f-b00a39c83df4" containerName="extract-content" Sep 30 14:30:32 crc kubenswrapper[4783]: I0930 14:30:32.887113 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe3b41a3-f3b9-474b-af5f-b00a39c83df4" containerName="extract-content" Sep 30 14:30:32 crc kubenswrapper[4783]: E0930 14:30:32.887148 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe3b41a3-f3b9-474b-af5f-b00a39c83df4" containerName="registry-server" Sep 30 14:30:32 crc kubenswrapper[4783]: I0930 14:30:32.887156 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe3b41a3-f3b9-474b-af5f-b00a39c83df4" containerName="registry-server" Sep 30 14:30:32 crc kubenswrapper[4783]: I0930 14:30:32.887347 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe3b41a3-f3b9-474b-af5f-b00a39c83df4" containerName="registry-server" Sep 30 14:30:32 crc kubenswrapper[4783]: I0930 14:30:32.889340 4783 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:32 crc kubenswrapper[4783]: I0930 14:30:32.894982 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mk2jv"] Sep 30 14:30:33 crc kubenswrapper[4783]: I0930 14:30:33.013508 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-utilities\") pod \"certified-operators-mk2jv\" (UID: \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\") " pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:33 crc kubenswrapper[4783]: I0930 14:30:33.013550 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-catalog-content\") pod \"certified-operators-mk2jv\" (UID: \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\") " pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:33 crc kubenswrapper[4783]: I0930 14:30:33.013631 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whrq8\" (UniqueName: \"kubernetes.io/projected/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-kube-api-access-whrq8\") pod \"certified-operators-mk2jv\" (UID: \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\") " pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:33 crc kubenswrapper[4783]: I0930 14:30:33.114847 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-utilities\") pod \"certified-operators-mk2jv\" (UID: \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\") " pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:33 crc kubenswrapper[4783]: I0930 14:30:33.114889 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-catalog-content\") pod \"certified-operators-mk2jv\" (UID: \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\") " pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:33 crc kubenswrapper[4783]: I0930 14:30:33.114957 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whrq8\" (UniqueName: \"kubernetes.io/projected/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-kube-api-access-whrq8\") pod \"certified-operators-mk2jv\" (UID: \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\") " pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:33 crc kubenswrapper[4783]: I0930 14:30:33.115732 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-utilities\") pod \"certified-operators-mk2jv\" (UID: \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\") " pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:33 crc kubenswrapper[4783]: I0930 14:30:33.115958 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-catalog-content\") pod \"certified-operators-mk2jv\" (UID: \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\") " pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:33 crc kubenswrapper[4783]: I0930 14:30:33.135659 4783 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whrq8\" (UniqueName: \"kubernetes.io/projected/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-kube-api-access-whrq8\") pod \"certified-operators-mk2jv\" (UID: \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\") " pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:33 crc kubenswrapper[4783]: I0930 14:30:33.209500 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:33 crc kubenswrapper[4783]: I0930 14:30:33.503534 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mk2jv"] Sep 30 14:30:33 crc kubenswrapper[4783]: I0930 14:30:33.649678 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mk2jv" event={"ID":"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339","Type":"ContainerStarted","Data":"1f47320074ff13f64c42ffa0869d1d14524de97e3e16ce150378968c56073bdb"} Sep 30 14:30:34 crc kubenswrapper[4783]: I0930 14:30:34.659683 4783 generic.go:334] "Generic (PLEG): container finished" podID="296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" containerID="2e7ab7a99e299913d995b511dfa2665f64c5f0fb9a6827f5b738632484066020" exitCode=0 Sep 30 14:30:34 crc kubenswrapper[4783]: I0930 14:30:34.659755 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mk2jv" event={"ID":"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339","Type":"ContainerDied","Data":"2e7ab7a99e299913d995b511dfa2665f64c5f0fb9a6827f5b738632484066020"} Sep 30 14:30:35 crc kubenswrapper[4783]: I0930 14:30:35.668610 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mk2jv" event={"ID":"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339","Type":"ContainerStarted","Data":"45eba35382981298bbe2d4d68e501fe83e03e516525013611972309a31cf1eea"} Sep 30 14:30:36 crc kubenswrapper[4783]: I0930 14:30:36.677249 4783 generic.go:334] "Generic (PLEG): container finished" podID="296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" containerID="45eba35382981298bbe2d4d68e501fe83e03e516525013611972309a31cf1eea" exitCode=0 Sep 30 14:30:36 crc kubenswrapper[4783]: I0930 14:30:36.677350 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mk2jv" event={"ID":"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339","Type":"ContainerDied","Data":"45eba35382981298bbe2d4d68e501fe83e03e516525013611972309a31cf1eea"} Sep 30 14:30:37 crc kubenswrapper[4783]: I0930 14:30:37.686327 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mk2jv" event={"ID":"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339","Type":"ContainerStarted","Data":"19a3ab5163570c23733a781fcd2bb80dd244742479be376215abee5e57870a6f"} Sep 30 14:30:37 crc kubenswrapper[4783]: I0930 14:30:37.706496 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mk2jv" podStartSLOduration=3.260853605 podStartE2EDuration="5.706473426s" podCreationTimestamp="2025-09-30 14:30:32 +0000 UTC" firstStartedPulling="2025-09-30 14:30:34.661588583 +0000 UTC m=+3334.593054890" lastFinishedPulling="2025-09-30 14:30:37.107208394 +0000 UTC m=+3337.038674711" observedRunningTime="2025-09-30 14:30:37.701868788 +0000 UTC m=+3337.633335115" watchObservedRunningTime="2025-09-30 14:30:37.706473426 +0000 UTC m=+3337.637939733" Sep 30 14:30:37 crc kubenswrapper[4783]: I0930 14:30:37.843039 4783 scope.go:117] 
"RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:30:38 crc kubenswrapper[4783]: I0930 14:30:38.693584 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"c857c6dd52cbb507ff2bb19426adfd21763c964bfa72f936240cb9921c3258bc"} Sep 30 14:30:43 crc kubenswrapper[4783]: I0930 14:30:43.210498 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:43 crc kubenswrapper[4783]: I0930 14:30:43.211853 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:43 crc kubenswrapper[4783]: I0930 14:30:43.263712 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:43 crc kubenswrapper[4783]: I0930 14:30:43.785092 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:43 crc kubenswrapper[4783]: I0930 14:30:43.829359 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mk2jv"] Sep 30 14:30:45 crc kubenswrapper[4783]: I0930 14:30:45.761088 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mk2jv" podUID="296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" containerName="registry-server" containerID="cri-o://19a3ab5163570c23733a781fcd2bb80dd244742479be376215abee5e57870a6f" gracePeriod=2 Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.142798 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.195467 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-utilities\") pod \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\" (UID: \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\") " Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.195566 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whrq8\" (UniqueName: \"kubernetes.io/projected/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-kube-api-access-whrq8\") pod \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\" (UID: \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\") " Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.195665 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-catalog-content\") pod \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\" (UID: \"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339\") " Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.198141 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-utilities" (OuterVolumeSpecName: "utilities") pod "296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" (UID: "296b05aa-d4dd-42b9-ad70-a1fb3b9b5339"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.202694 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-kube-api-access-whrq8" (OuterVolumeSpecName: "kube-api-access-whrq8") pod "296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" (UID: "296b05aa-d4dd-42b9-ad70-a1fb3b9b5339"). InnerVolumeSpecName "kube-api-access-whrq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.297557 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.297603 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whrq8\" (UniqueName: \"kubernetes.io/projected/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-kube-api-access-whrq8\") on node \"crc\" DevicePath \"\"" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.777601 4783 generic.go:334] "Generic (PLEG): container finished" podID="296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" containerID="19a3ab5163570c23733a781fcd2bb80dd244742479be376215abee5e57870a6f" exitCode=0 Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.778056 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mk2jv" event={"ID":"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339","Type":"ContainerDied","Data":"19a3ab5163570c23733a781fcd2bb80dd244742479be376215abee5e57870a6f"} Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.778093 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mk2jv" event={"ID":"296b05aa-d4dd-42b9-ad70-a1fb3b9b5339","Type":"ContainerDied","Data":"1f47320074ff13f64c42ffa0869d1d14524de97e3e16ce150378968c56073bdb"} Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.778117 4783 scope.go:117] "RemoveContainer" containerID="19a3ab5163570c23733a781fcd2bb80dd244742479be376215abee5e57870a6f" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.778335 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mk2jv" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.793984 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" (UID: "296b05aa-d4dd-42b9-ad70-a1fb3b9b5339"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.805157 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.808837 4783 scope.go:117] "RemoveContainer" containerID="45eba35382981298bbe2d4d68e501fe83e03e516525013611972309a31cf1eea" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.833449 4783 scope.go:117] "RemoveContainer" containerID="2e7ab7a99e299913d995b511dfa2665f64c5f0fb9a6827f5b738632484066020" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.857997 4783 scope.go:117] "RemoveContainer" containerID="19a3ab5163570c23733a781fcd2bb80dd244742479be376215abee5e57870a6f" Sep 30 14:30:46 crc kubenswrapper[4783]: E0930 14:30:46.858516 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19a3ab5163570c23733a781fcd2bb80dd244742479be376215abee5e57870a6f\": container with ID starting with 19a3ab5163570c23733a781fcd2bb80dd244742479be376215abee5e57870a6f not found: ID does not exist" containerID="19a3ab5163570c23733a781fcd2bb80dd244742479be376215abee5e57870a6f" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.858550 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19a3ab5163570c23733a781fcd2bb80dd244742479be376215abee5e57870a6f"} err="failed to get container status \"19a3ab5163570c23733a781fcd2bb80dd244742479be376215abee5e57870a6f\": rpc error: code = NotFound desc = could not find container \"19a3ab5163570c23733a781fcd2bb80dd244742479be376215abee5e57870a6f\": container with ID starting with 19a3ab5163570c23733a781fcd2bb80dd244742479be376215abee5e57870a6f not found: ID does not exist" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.858574 4783 scope.go:117] "RemoveContainer" containerID="45eba35382981298bbe2d4d68e501fe83e03e516525013611972309a31cf1eea" Sep 30 14:30:46 crc kubenswrapper[4783]: E0930 14:30:46.859081 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45eba35382981298bbe2d4d68e501fe83e03e516525013611972309a31cf1eea\": container with ID starting with 45eba35382981298bbe2d4d68e501fe83e03e516525013611972309a31cf1eea not found: ID does not exist" containerID="45eba35382981298bbe2d4d68e501fe83e03e516525013611972309a31cf1eea" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.859146 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45eba35382981298bbe2d4d68e501fe83e03e516525013611972309a31cf1eea"} err="failed to get container status \"45eba35382981298bbe2d4d68e501fe83e03e516525013611972309a31cf1eea\": rpc error: code = NotFound desc = could not find container \"45eba35382981298bbe2d4d68e501fe83e03e516525013611972309a31cf1eea\": container with ID starting with 45eba35382981298bbe2d4d68e501fe83e03e516525013611972309a31cf1eea not found: ID does not exist" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.859183 4783 scope.go:117] "RemoveContainer" containerID="2e7ab7a99e299913d995b511dfa2665f64c5f0fb9a6827f5b738632484066020" Sep 30 14:30:46 crc kubenswrapper[4783]: E0930 14:30:46.859614 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"2e7ab7a99e299913d995b511dfa2665f64c5f0fb9a6827f5b738632484066020\": container with ID starting with 2e7ab7a99e299913d995b511dfa2665f64c5f0fb9a6827f5b738632484066020 not found: ID does not exist" containerID="2e7ab7a99e299913d995b511dfa2665f64c5f0fb9a6827f5b738632484066020" Sep 30 14:30:46 crc kubenswrapper[4783]: I0930 14:30:46.859648 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e7ab7a99e299913d995b511dfa2665f64c5f0fb9a6827f5b738632484066020"} err="failed to get container status \"2e7ab7a99e299913d995b511dfa2665f64c5f0fb9a6827f5b738632484066020\": rpc error: code = NotFound desc = could not find container \"2e7ab7a99e299913d995b511dfa2665f64c5f0fb9a6827f5b738632484066020\": container with ID starting with 2e7ab7a99e299913d995b511dfa2665f64c5f0fb9a6827f5b738632484066020 not found: ID does not exist" Sep 30 14:30:47 crc kubenswrapper[4783]: I0930 14:30:47.110196 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mk2jv"] Sep 30 14:30:47 crc kubenswrapper[4783]: I0930 14:30:47.115373 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mk2jv"] Sep 30 14:30:48 crc kubenswrapper[4783]: I0930 14:30:48.856337 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" path="/var/lib/kubelet/pods/296b05aa-d4dd-42b9-ad70-a1fb3b9b5339/volumes" Sep 30 14:32:37 crc kubenswrapper[4783]: I0930 14:32:37.674326 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:32:37 crc kubenswrapper[4783]: I0930 14:32:37.675020 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:33:07 crc kubenswrapper[4783]: I0930 14:33:07.674215 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:33:07 crc kubenswrapper[4783]: I0930 14:33:07.674961 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:33:37 crc kubenswrapper[4783]: I0930 14:33:37.674066 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:33:37 crc kubenswrapper[4783]: I0930 14:33:37.675466 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:33:37 crc kubenswrapper[4783]: I0930 14:33:37.675556 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 14:33:37 crc kubenswrapper[4783]: I0930 14:33:37.676421 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c857c6dd52cbb507ff2bb19426adfd21763c964bfa72f936240cb9921c3258bc"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 14:33:37 crc kubenswrapper[4783]: I0930 14:33:37.676493 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://c857c6dd52cbb507ff2bb19426adfd21763c964bfa72f936240cb9921c3258bc" gracePeriod=600 Sep 30 14:33:38 crc kubenswrapper[4783]: I0930 14:33:38.171700 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="c857c6dd52cbb507ff2bb19426adfd21763c964bfa72f936240cb9921c3258bc" exitCode=0 Sep 30 14:33:38 crc kubenswrapper[4783]: I0930 14:33:38.171723 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"c857c6dd52cbb507ff2bb19426adfd21763c964bfa72f936240cb9921c3258bc"} Sep 30 14:33:38 crc kubenswrapper[4783]: I0930 14:33:38.172169 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907"} Sep 30 14:33:38 crc kubenswrapper[4783]: I0930 14:33:38.172199 4783 scope.go:117] "RemoveContainer" containerID="31dfecd16a5e57db6e4dcf0ef68559a4f1cd0ed180ac0a6bfc14df451752e0f7" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.183321 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kvwwv"] Sep 30 14:33:53 crc kubenswrapper[4783]: E0930 14:33:53.184610 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" containerName="extract-content" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.184635 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" containerName="extract-content" Sep 30 14:33:53 crc kubenswrapper[4783]: E0930 14:33:53.184693 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" containerName="extract-utilities" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.184707 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" containerName="extract-utilities" Sep 30 14:33:53 crc kubenswrapper[4783]: E0930 14:33:53.184733 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" containerName="registry-server" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.184748 4783 
state_mem.go:107] "Deleted CPUSet assignment" podUID="296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" containerName="registry-server" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.185014 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="296b05aa-d4dd-42b9-ad70-a1fb3b9b5339" containerName="registry-server" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.187618 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.200798 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kvwwv"] Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.296262 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxhbx\" (UniqueName: \"kubernetes.io/projected/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-kube-api-access-gxhbx\") pod \"community-operators-kvwwv\" (UID: \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\") " pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.296631 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-catalog-content\") pod \"community-operators-kvwwv\" (UID: \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\") " pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.296712 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-utilities\") pod \"community-operators-kvwwv\" (UID: \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\") " pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.398441 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxhbx\" (UniqueName: \"kubernetes.io/projected/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-kube-api-access-gxhbx\") pod \"community-operators-kvwwv\" (UID: \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\") " pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.398521 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-catalog-content\") pod \"community-operators-kvwwv\" (UID: \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\") " pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.398647 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-utilities\") pod \"community-operators-kvwwv\" (UID: \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\") " pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.399352 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-utilities\") pod \"community-operators-kvwwv\" (UID: \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\") " pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:33:53 crc kubenswrapper[4783]: 
I0930 14:33:53.399384 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-catalog-content\") pod \"community-operators-kvwwv\" (UID: \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\") " pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.422167 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxhbx\" (UniqueName: \"kubernetes.io/projected/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-kube-api-access-gxhbx\") pod \"community-operators-kvwwv\" (UID: \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\") " pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:33:53 crc kubenswrapper[4783]: I0930 14:33:53.519755 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:33:54 crc kubenswrapper[4783]: I0930 14:33:54.062652 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kvwwv"] Sep 30 14:33:54 crc kubenswrapper[4783]: W0930 14:33:54.067037 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0d7dea8_e2ae_4e78_9720_f9f2db9645eb.slice/crio-74a4b3569095a528c71f9e38114ea24a11d87375c57861dc9854667efb96751e WatchSource:0}: Error finding container 74a4b3569095a528c71f9e38114ea24a11d87375c57861dc9854667efb96751e: Status 404 returned error can't find the container with id 74a4b3569095a528c71f9e38114ea24a11d87375c57861dc9854667efb96751e Sep 30 14:33:54 crc kubenswrapper[4783]: I0930 14:33:54.312065 4783 generic.go:334] "Generic (PLEG): container finished" podID="b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" containerID="fecb316ff418a4a4e325deb5c93d15780ea198aa923dfa9536bb83e0e4ca257d" exitCode=0 Sep 30 14:33:54 crc kubenswrapper[4783]: I0930 14:33:54.312142 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvwwv" event={"ID":"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb","Type":"ContainerDied","Data":"fecb316ff418a4a4e325deb5c93d15780ea198aa923dfa9536bb83e0e4ca257d"} Sep 30 14:33:54 crc kubenswrapper[4783]: I0930 14:33:54.312455 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvwwv" event={"ID":"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb","Type":"ContainerStarted","Data":"74a4b3569095a528c71f9e38114ea24a11d87375c57861dc9854667efb96751e"} Sep 30 14:33:56 crc kubenswrapper[4783]: I0930 14:33:56.331867 4783 generic.go:334] "Generic (PLEG): container finished" podID="b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" containerID="c891c931e1e4c88048ee60ca6638639d79d46d82eafa6e1ad30dba1376c5358d" exitCode=0 Sep 30 14:33:56 crc kubenswrapper[4783]: I0930 14:33:56.332049 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvwwv" event={"ID":"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb","Type":"ContainerDied","Data":"c891c931e1e4c88048ee60ca6638639d79d46d82eafa6e1ad30dba1376c5358d"} Sep 30 14:33:57 crc kubenswrapper[4783]: I0930 14:33:57.340621 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvwwv" event={"ID":"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb","Type":"ContainerStarted","Data":"c6cc97ad45dbc706feed67859b9aab26154b554c86c3ae59d8b7c8c7d67e8246"} Sep 30 14:33:57 crc kubenswrapper[4783]: I0930 14:33:57.366540 4783 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kvwwv" podStartSLOduration=1.932166095 podStartE2EDuration="4.366519947s" podCreationTimestamp="2025-09-30 14:33:53 +0000 UTC" firstStartedPulling="2025-09-30 14:33:54.314126643 +0000 UTC m=+3534.245592950" lastFinishedPulling="2025-09-30 14:33:56.748480495 +0000 UTC m=+3536.679946802" observedRunningTime="2025-09-30 14:33:57.363455879 +0000 UTC m=+3537.294922196" watchObservedRunningTime="2025-09-30 14:33:57.366519947 +0000 UTC m=+3537.297986264" Sep 30 14:34:03 crc kubenswrapper[4783]: I0930 14:34:03.519969 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:34:03 crc kubenswrapper[4783]: I0930 14:34:03.520645 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:34:03 crc kubenswrapper[4783]: I0930 14:34:03.564402 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:34:04 crc kubenswrapper[4783]: I0930 14:34:04.438205 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:34:04 crc kubenswrapper[4783]: I0930 14:34:04.491089 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kvwwv"] Sep 30 14:34:06 crc kubenswrapper[4783]: I0930 14:34:06.410823 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kvwwv" podUID="b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" containerName="registry-server" containerID="cri-o://c6cc97ad45dbc706feed67859b9aab26154b554c86c3ae59d8b7c8c7d67e8246" gracePeriod=2 Sep 30 14:34:06 crc kubenswrapper[4783]: I0930 14:34:06.787488 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:34:06 crc kubenswrapper[4783]: I0930 14:34:06.907501 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-utilities\") pod \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\" (UID: \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\") " Sep 30 14:34:06 crc kubenswrapper[4783]: I0930 14:34:06.907603 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxhbx\" (UniqueName: \"kubernetes.io/projected/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-kube-api-access-gxhbx\") pod \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\" (UID: \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\") " Sep 30 14:34:06 crc kubenswrapper[4783]: I0930 14:34:06.907644 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-catalog-content\") pod \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\" (UID: \"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb\") " Sep 30 14:34:06 crc kubenswrapper[4783]: I0930 14:34:06.908668 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-utilities" (OuterVolumeSpecName: "utilities") pod "b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" (UID: "b0d7dea8-e2ae-4e78-9720-f9f2db9645eb"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:34:06 crc kubenswrapper[4783]: I0930 14:34:06.915017 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-kube-api-access-gxhbx" (OuterVolumeSpecName: "kube-api-access-gxhbx") pod "b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" (UID: "b0d7dea8-e2ae-4e78-9720-f9f2db9645eb"). InnerVolumeSpecName "kube-api-access-gxhbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:34:06 crc kubenswrapper[4783]: I0930 14:34:06.955848 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" (UID: "b0d7dea8-e2ae-4e78-9720-f9f2db9645eb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.009305 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.009351 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxhbx\" (UniqueName: \"kubernetes.io/projected/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-kube-api-access-gxhbx\") on node \"crc\" DevicePath \"\"" Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.009367 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.419723 4783 generic.go:334] "Generic (PLEG): container finished" podID="b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" containerID="c6cc97ad45dbc706feed67859b9aab26154b554c86c3ae59d8b7c8c7d67e8246" exitCode=0 Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.419786 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kvwwv" Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.419785 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvwwv" event={"ID":"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb","Type":"ContainerDied","Data":"c6cc97ad45dbc706feed67859b9aab26154b554c86c3ae59d8b7c8c7d67e8246"} Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.419937 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvwwv" event={"ID":"b0d7dea8-e2ae-4e78-9720-f9f2db9645eb","Type":"ContainerDied","Data":"74a4b3569095a528c71f9e38114ea24a11d87375c57861dc9854667efb96751e"} Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.419974 4783 scope.go:117] "RemoveContainer" containerID="c6cc97ad45dbc706feed67859b9aab26154b554c86c3ae59d8b7c8c7d67e8246" Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.447366 4783 scope.go:117] "RemoveContainer" containerID="c891c931e1e4c88048ee60ca6638639d79d46d82eafa6e1ad30dba1376c5358d" Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.457407 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kvwwv"] Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.463825 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kvwwv"] Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.479576 4783 scope.go:117] "RemoveContainer" containerID="fecb316ff418a4a4e325deb5c93d15780ea198aa923dfa9536bb83e0e4ca257d" Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.497013 4783 scope.go:117] "RemoveContainer" containerID="c6cc97ad45dbc706feed67859b9aab26154b554c86c3ae59d8b7c8c7d67e8246" Sep 30 14:34:07 crc kubenswrapper[4783]: E0930 14:34:07.497602 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6cc97ad45dbc706feed67859b9aab26154b554c86c3ae59d8b7c8c7d67e8246\": container with ID starting with c6cc97ad45dbc706feed67859b9aab26154b554c86c3ae59d8b7c8c7d67e8246 not found: ID does not exist" containerID="c6cc97ad45dbc706feed67859b9aab26154b554c86c3ae59d8b7c8c7d67e8246" Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.497651 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6cc97ad45dbc706feed67859b9aab26154b554c86c3ae59d8b7c8c7d67e8246"} err="failed to get container status \"c6cc97ad45dbc706feed67859b9aab26154b554c86c3ae59d8b7c8c7d67e8246\": rpc error: code = NotFound desc = could not find container \"c6cc97ad45dbc706feed67859b9aab26154b554c86c3ae59d8b7c8c7d67e8246\": container with ID starting with c6cc97ad45dbc706feed67859b9aab26154b554c86c3ae59d8b7c8c7d67e8246 not found: ID does not exist" Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.497686 4783 scope.go:117] "RemoveContainer" containerID="c891c931e1e4c88048ee60ca6638639d79d46d82eafa6e1ad30dba1376c5358d" Sep 30 14:34:07 crc kubenswrapper[4783]: E0930 14:34:07.498063 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c891c931e1e4c88048ee60ca6638639d79d46d82eafa6e1ad30dba1376c5358d\": container with ID starting with c891c931e1e4c88048ee60ca6638639d79d46d82eafa6e1ad30dba1376c5358d not found: ID does not exist" containerID="c891c931e1e4c88048ee60ca6638639d79d46d82eafa6e1ad30dba1376c5358d" Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.498091 4783 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c891c931e1e4c88048ee60ca6638639d79d46d82eafa6e1ad30dba1376c5358d"} err="failed to get container status \"c891c931e1e4c88048ee60ca6638639d79d46d82eafa6e1ad30dba1376c5358d\": rpc error: code = NotFound desc = could not find container \"c891c931e1e4c88048ee60ca6638639d79d46d82eafa6e1ad30dba1376c5358d\": container with ID starting with c891c931e1e4c88048ee60ca6638639d79d46d82eafa6e1ad30dba1376c5358d not found: ID does not exist" Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.498109 4783 scope.go:117] "RemoveContainer" containerID="fecb316ff418a4a4e325deb5c93d15780ea198aa923dfa9536bb83e0e4ca257d" Sep 30 14:34:07 crc kubenswrapper[4783]: E0930 14:34:07.498553 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fecb316ff418a4a4e325deb5c93d15780ea198aa923dfa9536bb83e0e4ca257d\": container with ID starting with fecb316ff418a4a4e325deb5c93d15780ea198aa923dfa9536bb83e0e4ca257d not found: ID does not exist" containerID="fecb316ff418a4a4e325deb5c93d15780ea198aa923dfa9536bb83e0e4ca257d" Sep 30 14:34:07 crc kubenswrapper[4783]: I0930 14:34:07.498590 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fecb316ff418a4a4e325deb5c93d15780ea198aa923dfa9536bb83e0e4ca257d"} err="failed to get container status \"fecb316ff418a4a4e325deb5c93d15780ea198aa923dfa9536bb83e0e4ca257d\": rpc error: code = NotFound desc = could not find container \"fecb316ff418a4a4e325deb5c93d15780ea198aa923dfa9536bb83e0e4ca257d\": container with ID starting with fecb316ff418a4a4e325deb5c93d15780ea198aa923dfa9536bb83e0e4ca257d not found: ID does not exist" Sep 30 14:34:08 crc kubenswrapper[4783]: I0930 14:34:08.852010 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" path="/var/lib/kubelet/pods/b0d7dea8-e2ae-4e78-9720-f9f2db9645eb/volumes" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.651031 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kntdg"] Sep 30 14:34:22 crc kubenswrapper[4783]: E0930 14:34:22.654683 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" containerName="extract-content" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.654808 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" containerName="extract-content" Sep 30 14:34:22 crc kubenswrapper[4783]: E0930 14:34:22.654910 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" containerName="extract-utilities" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.655001 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" containerName="extract-utilities" Sep 30 14:34:22 crc kubenswrapper[4783]: E0930 14:34:22.655097 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" containerName="registry-server" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.655179 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" containerName="registry-server" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.655456 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d7dea8-e2ae-4e78-9720-f9f2db9645eb" 
containerName="registry-server" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.656812 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.681819 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kntdg"] Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.745280 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbv52\" (UniqueName: \"kubernetes.io/projected/27624390-f02c-4dc6-a499-3a19e9e328dd-kube-api-access-gbv52\") pod \"redhat-marketplace-kntdg\" (UID: \"27624390-f02c-4dc6-a499-3a19e9e328dd\") " pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.745358 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27624390-f02c-4dc6-a499-3a19e9e328dd-utilities\") pod \"redhat-marketplace-kntdg\" (UID: \"27624390-f02c-4dc6-a499-3a19e9e328dd\") " pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.745384 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27624390-f02c-4dc6-a499-3a19e9e328dd-catalog-content\") pod \"redhat-marketplace-kntdg\" (UID: \"27624390-f02c-4dc6-a499-3a19e9e328dd\") " pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.846409 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27624390-f02c-4dc6-a499-3a19e9e328dd-utilities\") pod \"redhat-marketplace-kntdg\" (UID: \"27624390-f02c-4dc6-a499-3a19e9e328dd\") " pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.846454 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27624390-f02c-4dc6-a499-3a19e9e328dd-catalog-content\") pod \"redhat-marketplace-kntdg\" (UID: \"27624390-f02c-4dc6-a499-3a19e9e328dd\") " pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.846501 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbv52\" (UniqueName: \"kubernetes.io/projected/27624390-f02c-4dc6-a499-3a19e9e328dd-kube-api-access-gbv52\") pod \"redhat-marketplace-kntdg\" (UID: \"27624390-f02c-4dc6-a499-3a19e9e328dd\") " pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.846980 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27624390-f02c-4dc6-a499-3a19e9e328dd-utilities\") pod \"redhat-marketplace-kntdg\" (UID: \"27624390-f02c-4dc6-a499-3a19e9e328dd\") " pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.847100 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27624390-f02c-4dc6-a499-3a19e9e328dd-catalog-content\") pod \"redhat-marketplace-kntdg\" (UID: \"27624390-f02c-4dc6-a499-3a19e9e328dd\") " 
pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.869065 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbv52\" (UniqueName: \"kubernetes.io/projected/27624390-f02c-4dc6-a499-3a19e9e328dd-kube-api-access-gbv52\") pod \"redhat-marketplace-kntdg\" (UID: \"27624390-f02c-4dc6-a499-3a19e9e328dd\") " pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:22 crc kubenswrapper[4783]: I0930 14:34:22.997381 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:23 crc kubenswrapper[4783]: I0930 14:34:23.417607 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kntdg"] Sep 30 14:34:23 crc kubenswrapper[4783]: I0930 14:34:23.541901 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kntdg" event={"ID":"27624390-f02c-4dc6-a499-3a19e9e328dd","Type":"ContainerStarted","Data":"f5294a7b36e07774e845faf658fe5425f09b15d3d8cb81d8ff7392379bfaf557"} Sep 30 14:34:24 crc kubenswrapper[4783]: I0930 14:34:24.552247 4783 generic.go:334] "Generic (PLEG): container finished" podID="27624390-f02c-4dc6-a499-3a19e9e328dd" containerID="fa764ebbf2edce69d362c4f7d8489fbc5059528dc46040e2757bc726e63788ba" exitCode=0 Sep 30 14:34:24 crc kubenswrapper[4783]: I0930 14:34:24.552301 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kntdg" event={"ID":"27624390-f02c-4dc6-a499-3a19e9e328dd","Type":"ContainerDied","Data":"fa764ebbf2edce69d362c4f7d8489fbc5059528dc46040e2757bc726e63788ba"} Sep 30 14:34:26 crc kubenswrapper[4783]: I0930 14:34:26.571015 4783 generic.go:334] "Generic (PLEG): container finished" podID="27624390-f02c-4dc6-a499-3a19e9e328dd" containerID="e45eb76248a43422c864abf8016c0bec828e75fe35f6b15fd5115970cbcc3144" exitCode=0 Sep 30 14:34:26 crc kubenswrapper[4783]: I0930 14:34:26.571069 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kntdg" event={"ID":"27624390-f02c-4dc6-a499-3a19e9e328dd","Type":"ContainerDied","Data":"e45eb76248a43422c864abf8016c0bec828e75fe35f6b15fd5115970cbcc3144"} Sep 30 14:34:27 crc kubenswrapper[4783]: I0930 14:34:27.583110 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kntdg" event={"ID":"27624390-f02c-4dc6-a499-3a19e9e328dd","Type":"ContainerStarted","Data":"ba030f60100382e79404d092820ba7138cf229f0d7a519744ee294747a7d2bb0"} Sep 30 14:34:27 crc kubenswrapper[4783]: I0930 14:34:27.603592 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kntdg" podStartSLOduration=2.995286293 podStartE2EDuration="5.603574894s" podCreationTimestamp="2025-09-30 14:34:22 +0000 UTC" firstStartedPulling="2025-09-30 14:34:24.554089512 +0000 UTC m=+3564.485555819" lastFinishedPulling="2025-09-30 14:34:27.162378113 +0000 UTC m=+3567.093844420" observedRunningTime="2025-09-30 14:34:27.598625426 +0000 UTC m=+3567.530091733" watchObservedRunningTime="2025-09-30 14:34:27.603574894 +0000 UTC m=+3567.535041201" Sep 30 14:34:32 crc kubenswrapper[4783]: I0930 14:34:32.998049 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:32 crc kubenswrapper[4783]: I0930 14:34:32.998628 4783 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:33 crc kubenswrapper[4783]: I0930 14:34:33.036319 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:33 crc kubenswrapper[4783]: I0930 14:34:33.673391 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:33 crc kubenswrapper[4783]: I0930 14:34:33.741319 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kntdg"] Sep 30 14:34:35 crc kubenswrapper[4783]: I0930 14:34:35.647914 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kntdg" podUID="27624390-f02c-4dc6-a499-3a19e9e328dd" containerName="registry-server" containerID="cri-o://ba030f60100382e79404d092820ba7138cf229f0d7a519744ee294747a7d2bb0" gracePeriod=2 Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.080267 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.147926 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27624390-f02c-4dc6-a499-3a19e9e328dd-catalog-content\") pod \"27624390-f02c-4dc6-a499-3a19e9e328dd\" (UID: \"27624390-f02c-4dc6-a499-3a19e9e328dd\") " Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.148055 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbv52\" (UniqueName: \"kubernetes.io/projected/27624390-f02c-4dc6-a499-3a19e9e328dd-kube-api-access-gbv52\") pod \"27624390-f02c-4dc6-a499-3a19e9e328dd\" (UID: \"27624390-f02c-4dc6-a499-3a19e9e328dd\") " Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.148121 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27624390-f02c-4dc6-a499-3a19e9e328dd-utilities\") pod \"27624390-f02c-4dc6-a499-3a19e9e328dd\" (UID: \"27624390-f02c-4dc6-a499-3a19e9e328dd\") " Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.149153 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27624390-f02c-4dc6-a499-3a19e9e328dd-utilities" (OuterVolumeSpecName: "utilities") pod "27624390-f02c-4dc6-a499-3a19e9e328dd" (UID: "27624390-f02c-4dc6-a499-3a19e9e328dd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.155005 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27624390-f02c-4dc6-a499-3a19e9e328dd-kube-api-access-gbv52" (OuterVolumeSpecName: "kube-api-access-gbv52") pod "27624390-f02c-4dc6-a499-3a19e9e328dd" (UID: "27624390-f02c-4dc6-a499-3a19e9e328dd"). InnerVolumeSpecName "kube-api-access-gbv52". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.174553 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27624390-f02c-4dc6-a499-3a19e9e328dd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "27624390-f02c-4dc6-a499-3a19e9e328dd" (UID: "27624390-f02c-4dc6-a499-3a19e9e328dd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.249993 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27624390-f02c-4dc6-a499-3a19e9e328dd-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.250042 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbv52\" (UniqueName: \"kubernetes.io/projected/27624390-f02c-4dc6-a499-3a19e9e328dd-kube-api-access-gbv52\") on node \"crc\" DevicePath \"\"" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.250062 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27624390-f02c-4dc6-a499-3a19e9e328dd-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.658302 4783 generic.go:334] "Generic (PLEG): container finished" podID="27624390-f02c-4dc6-a499-3a19e9e328dd" containerID="ba030f60100382e79404d092820ba7138cf229f0d7a519744ee294747a7d2bb0" exitCode=0 Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.658352 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kntdg" event={"ID":"27624390-f02c-4dc6-a499-3a19e9e328dd","Type":"ContainerDied","Data":"ba030f60100382e79404d092820ba7138cf229f0d7a519744ee294747a7d2bb0"} Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.658394 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kntdg" event={"ID":"27624390-f02c-4dc6-a499-3a19e9e328dd","Type":"ContainerDied","Data":"f5294a7b36e07774e845faf658fe5425f09b15d3d8cb81d8ff7392379bfaf557"} Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.658414 4783 scope.go:117] "RemoveContainer" containerID="ba030f60100382e79404d092820ba7138cf229f0d7a519744ee294747a7d2bb0" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.658539 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kntdg" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.685669 4783 scope.go:117] "RemoveContainer" containerID="e45eb76248a43422c864abf8016c0bec828e75fe35f6b15fd5115970cbcc3144" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.698814 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kntdg"] Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.703204 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kntdg"] Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.722905 4783 scope.go:117] "RemoveContainer" containerID="fa764ebbf2edce69d362c4f7d8489fbc5059528dc46040e2757bc726e63788ba" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.739614 4783 scope.go:117] "RemoveContainer" containerID="ba030f60100382e79404d092820ba7138cf229f0d7a519744ee294747a7d2bb0" Sep 30 14:34:36 crc kubenswrapper[4783]: E0930 14:34:36.740056 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba030f60100382e79404d092820ba7138cf229f0d7a519744ee294747a7d2bb0\": container with ID starting with ba030f60100382e79404d092820ba7138cf229f0d7a519744ee294747a7d2bb0 not found: ID does not exist" containerID="ba030f60100382e79404d092820ba7138cf229f0d7a519744ee294747a7d2bb0" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.740109 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba030f60100382e79404d092820ba7138cf229f0d7a519744ee294747a7d2bb0"} err="failed to get container status \"ba030f60100382e79404d092820ba7138cf229f0d7a519744ee294747a7d2bb0\": rpc error: code = NotFound desc = could not find container \"ba030f60100382e79404d092820ba7138cf229f0d7a519744ee294747a7d2bb0\": container with ID starting with ba030f60100382e79404d092820ba7138cf229f0d7a519744ee294747a7d2bb0 not found: ID does not exist" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.740144 4783 scope.go:117] "RemoveContainer" containerID="e45eb76248a43422c864abf8016c0bec828e75fe35f6b15fd5115970cbcc3144" Sep 30 14:34:36 crc kubenswrapper[4783]: E0930 14:34:36.740663 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e45eb76248a43422c864abf8016c0bec828e75fe35f6b15fd5115970cbcc3144\": container with ID starting with e45eb76248a43422c864abf8016c0bec828e75fe35f6b15fd5115970cbcc3144 not found: ID does not exist" containerID="e45eb76248a43422c864abf8016c0bec828e75fe35f6b15fd5115970cbcc3144" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.740704 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e45eb76248a43422c864abf8016c0bec828e75fe35f6b15fd5115970cbcc3144"} err="failed to get container status \"e45eb76248a43422c864abf8016c0bec828e75fe35f6b15fd5115970cbcc3144\": rpc error: code = NotFound desc = could not find container \"e45eb76248a43422c864abf8016c0bec828e75fe35f6b15fd5115970cbcc3144\": container with ID starting with e45eb76248a43422c864abf8016c0bec828e75fe35f6b15fd5115970cbcc3144 not found: ID does not exist" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.740767 4783 scope.go:117] "RemoveContainer" containerID="fa764ebbf2edce69d362c4f7d8489fbc5059528dc46040e2757bc726e63788ba" Sep 30 14:34:36 crc kubenswrapper[4783]: E0930 14:34:36.741086 4783 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"fa764ebbf2edce69d362c4f7d8489fbc5059528dc46040e2757bc726e63788ba\": container with ID starting with fa764ebbf2edce69d362c4f7d8489fbc5059528dc46040e2757bc726e63788ba not found: ID does not exist" containerID="fa764ebbf2edce69d362c4f7d8489fbc5059528dc46040e2757bc726e63788ba" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.741119 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa764ebbf2edce69d362c4f7d8489fbc5059528dc46040e2757bc726e63788ba"} err="failed to get container status \"fa764ebbf2edce69d362c4f7d8489fbc5059528dc46040e2757bc726e63788ba\": rpc error: code = NotFound desc = could not find container \"fa764ebbf2edce69d362c4f7d8489fbc5059528dc46040e2757bc726e63788ba\": container with ID starting with fa764ebbf2edce69d362c4f7d8489fbc5059528dc46040e2757bc726e63788ba not found: ID does not exist" Sep 30 14:34:36 crc kubenswrapper[4783]: I0930 14:34:36.858893 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27624390-f02c-4dc6-a499-3a19e9e328dd" path="/var/lib/kubelet/pods/27624390-f02c-4dc6-a499-3a19e9e328dd/volumes" Sep 30 14:35:37 crc kubenswrapper[4783]: I0930 14:35:37.673669 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:35:37 crc kubenswrapper[4783]: I0930 14:35:37.674380 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:36:07 crc kubenswrapper[4783]: I0930 14:36:07.674352 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:36:07 crc kubenswrapper[4783]: I0930 14:36:07.675293 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:36:37 crc kubenswrapper[4783]: I0930 14:36:37.674253 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:36:37 crc kubenswrapper[4783]: I0930 14:36:37.674836 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:36:37 crc kubenswrapper[4783]: I0930 14:36:37.675078 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 14:36:37 crc kubenswrapper[4783]: I0930 14:36:37.675877 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 14:36:37 crc kubenswrapper[4783]: I0930 14:36:37.676051 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" gracePeriod=600 Sep 30 14:36:37 crc kubenswrapper[4783]: E0930 14:36:37.795179 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:36:38 crc kubenswrapper[4783]: I0930 14:36:38.671505 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" exitCode=0 Sep 30 14:36:38 crc kubenswrapper[4783]: I0930 14:36:38.671569 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907"} Sep 30 14:36:38 crc kubenswrapper[4783]: I0930 14:36:38.671611 4783 scope.go:117] "RemoveContainer" containerID="c857c6dd52cbb507ff2bb19426adfd21763c964bfa72f936240cb9921c3258bc" Sep 30 14:36:38 crc kubenswrapper[4783]: I0930 14:36:38.672181 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:36:38 crc kubenswrapper[4783]: E0930 14:36:38.672525 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:36:51 crc kubenswrapper[4783]: I0930 14:36:51.843601 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:36:51 crc kubenswrapper[4783]: E0930 14:36:51.844332 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:37:06 crc 
kubenswrapper[4783]: I0930 14:37:06.843918 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:37:06 crc kubenswrapper[4783]: E0930 14:37:06.845358 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:37:18 crc kubenswrapper[4783]: I0930 14:37:18.843482 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:37:18 crc kubenswrapper[4783]: E0930 14:37:18.844463 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:37:31 crc kubenswrapper[4783]: I0930 14:37:31.843467 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:37:31 crc kubenswrapper[4783]: E0930 14:37:31.844734 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:37:42 crc kubenswrapper[4783]: I0930 14:37:42.843717 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:37:42 crc kubenswrapper[4783]: E0930 14:37:42.844447 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:37:56 crc kubenswrapper[4783]: I0930 14:37:56.844374 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:37:56 crc kubenswrapper[4783]: E0930 14:37:56.845698 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:38:09 crc kubenswrapper[4783]: I0930 14:38:09.843583 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:38:09 crc 
kubenswrapper[4783]: E0930 14:38:09.845679 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:38:20 crc kubenswrapper[4783]: I0930 14:38:20.849464 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:38:20 crc kubenswrapper[4783]: E0930 14:38:20.850823 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:38:35 crc kubenswrapper[4783]: I0930 14:38:35.844532 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:38:35 crc kubenswrapper[4783]: E0930 14:38:35.845766 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:38:47 crc kubenswrapper[4783]: I0930 14:38:47.843619 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:38:47 crc kubenswrapper[4783]: E0930 14:38:47.844598 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:39:02 crc kubenswrapper[4783]: I0930 14:39:02.843602 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:39:02 crc kubenswrapper[4783]: E0930 14:39:02.844541 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:39:17 crc kubenswrapper[4783]: I0930 14:39:17.843535 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:39:17 crc kubenswrapper[4783]: E0930 14:39:17.844624 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:39:28 crc kubenswrapper[4783]: I0930 14:39:28.844069 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:39:28 crc kubenswrapper[4783]: E0930 14:39:28.845097 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:39:43 crc kubenswrapper[4783]: I0930 14:39:43.844607 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:39:43 crc kubenswrapper[4783]: E0930 14:39:43.845723 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:39:58 crc kubenswrapper[4783]: I0930 14:39:58.843123 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:39:58 crc kubenswrapper[4783]: E0930 14:39:58.843815 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:40:13 crc kubenswrapper[4783]: I0930 14:40:13.844364 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:40:13 crc kubenswrapper[4783]: E0930 14:40:13.845365 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:40:28 crc kubenswrapper[4783]: I0930 14:40:28.843161 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:40:28 crc kubenswrapper[4783]: E0930 14:40:28.845176 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.536562 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-v8q8g"] Sep 30 14:40:40 crc kubenswrapper[4783]: E0930 14:40:40.537997 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27624390-f02c-4dc6-a499-3a19e9e328dd" containerName="extract-utilities" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.538028 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="27624390-f02c-4dc6-a499-3a19e9e328dd" containerName="extract-utilities" Sep 30 14:40:40 crc kubenswrapper[4783]: E0930 14:40:40.538059 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27624390-f02c-4dc6-a499-3a19e9e328dd" containerName="extract-content" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.538072 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="27624390-f02c-4dc6-a499-3a19e9e328dd" containerName="extract-content" Sep 30 14:40:40 crc kubenswrapper[4783]: E0930 14:40:40.538123 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27624390-f02c-4dc6-a499-3a19e9e328dd" containerName="registry-server" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.538136 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="27624390-f02c-4dc6-a499-3a19e9e328dd" containerName="registry-server" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.538507 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="27624390-f02c-4dc6-a499-3a19e9e328dd" containerName="registry-server" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.540868 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.573567 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v8q8g"] Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.660049 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788f6b59-881e-42f7-b0df-0e4671d74c4f-catalog-content\") pod \"redhat-operators-v8q8g\" (UID: \"788f6b59-881e-42f7-b0df-0e4671d74c4f\") " pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.660472 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpwtw\" (UniqueName: \"kubernetes.io/projected/788f6b59-881e-42f7-b0df-0e4671d74c4f-kube-api-access-qpwtw\") pod \"redhat-operators-v8q8g\" (UID: \"788f6b59-881e-42f7-b0df-0e4671d74c4f\") " pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.660563 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788f6b59-881e-42f7-b0df-0e4671d74c4f-utilities\") pod \"redhat-operators-v8q8g\" (UID: \"788f6b59-881e-42f7-b0df-0e4671d74c4f\") " pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.761657 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpwtw\" (UniqueName: \"kubernetes.io/projected/788f6b59-881e-42f7-b0df-0e4671d74c4f-kube-api-access-qpwtw\") pod \"redhat-operators-v8q8g\" (UID: \"788f6b59-881e-42f7-b0df-0e4671d74c4f\") " pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.761712 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788f6b59-881e-42f7-b0df-0e4671d74c4f-utilities\") pod \"redhat-operators-v8q8g\" (UID: \"788f6b59-881e-42f7-b0df-0e4671d74c4f\") " pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.761770 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788f6b59-881e-42f7-b0df-0e4671d74c4f-catalog-content\") pod \"redhat-operators-v8q8g\" (UID: \"788f6b59-881e-42f7-b0df-0e4671d74c4f\") " pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.762353 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788f6b59-881e-42f7-b0df-0e4671d74c4f-utilities\") pod \"redhat-operators-v8q8g\" (UID: \"788f6b59-881e-42f7-b0df-0e4671d74c4f\") " pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.762393 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788f6b59-881e-42f7-b0df-0e4671d74c4f-catalog-content\") pod \"redhat-operators-v8q8g\" (UID: \"788f6b59-881e-42f7-b0df-0e4671d74c4f\") " pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.782723 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-qpwtw\" (UniqueName: \"kubernetes.io/projected/788f6b59-881e-42f7-b0df-0e4671d74c4f-kube-api-access-qpwtw\") pod \"redhat-operators-v8q8g\" (UID: \"788f6b59-881e-42f7-b0df-0e4671d74c4f\") " pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:40 crc kubenswrapper[4783]: I0930 14:40:40.868201 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:41 crc kubenswrapper[4783]: I0930 14:40:41.282027 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v8q8g"] Sep 30 14:40:41 crc kubenswrapper[4783]: I0930 14:40:41.704954 4783 generic.go:334] "Generic (PLEG): container finished" podID="788f6b59-881e-42f7-b0df-0e4671d74c4f" containerID="368a20bb30b23488ea7877d76ef5aa63b2ac68f348dd5bc9d58ef9f1d9cecb0b" exitCode=0 Sep 30 14:40:41 crc kubenswrapper[4783]: I0930 14:40:41.705216 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8q8g" event={"ID":"788f6b59-881e-42f7-b0df-0e4671d74c4f","Type":"ContainerDied","Data":"368a20bb30b23488ea7877d76ef5aa63b2ac68f348dd5bc9d58ef9f1d9cecb0b"} Sep 30 14:40:41 crc kubenswrapper[4783]: I0930 14:40:41.705309 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8q8g" event={"ID":"788f6b59-881e-42f7-b0df-0e4671d74c4f","Type":"ContainerStarted","Data":"5a0384fc2b0f806d649b557d5e2499d31d7d23a1c687021328a595158c024dba"} Sep 30 14:40:41 crc kubenswrapper[4783]: I0930 14:40:41.707501 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 14:40:41 crc kubenswrapper[4783]: I0930 14:40:41.842917 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:40:41 crc kubenswrapper[4783]: E0930 14:40:41.843224 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:40:42 crc kubenswrapper[4783]: I0930 14:40:42.713361 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8q8g" event={"ID":"788f6b59-881e-42f7-b0df-0e4671d74c4f","Type":"ContainerStarted","Data":"9b8cdf417245ddefe5853ae04293336c14253791b20d3dea52e49853ebf09b71"} Sep 30 14:40:43 crc kubenswrapper[4783]: I0930 14:40:43.722677 4783 generic.go:334] "Generic (PLEG): container finished" podID="788f6b59-881e-42f7-b0df-0e4671d74c4f" containerID="9b8cdf417245ddefe5853ae04293336c14253791b20d3dea52e49853ebf09b71" exitCode=0 Sep 30 14:40:43 crc kubenswrapper[4783]: I0930 14:40:43.722727 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8q8g" event={"ID":"788f6b59-881e-42f7-b0df-0e4671d74c4f","Type":"ContainerDied","Data":"9b8cdf417245ddefe5853ae04293336c14253791b20d3dea52e49853ebf09b71"} Sep 30 14:40:44 crc kubenswrapper[4783]: I0930 14:40:44.730052 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8q8g" 
event={"ID":"788f6b59-881e-42f7-b0df-0e4671d74c4f","Type":"ContainerStarted","Data":"8ec084b1f6c56300dc783c8ad98d3506692d0062ed84896b6ca7558b181f6590"} Sep 30 14:40:48 crc kubenswrapper[4783]: I0930 14:40:48.499248 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-v8q8g" podStartSLOduration=5.939793087 podStartE2EDuration="8.499208492s" podCreationTimestamp="2025-09-30 14:40:40 +0000 UTC" firstStartedPulling="2025-09-30 14:40:41.70722421 +0000 UTC m=+3941.638690517" lastFinishedPulling="2025-09-30 14:40:44.266639605 +0000 UTC m=+3944.198105922" observedRunningTime="2025-09-30 14:40:44.75043792 +0000 UTC m=+3944.681904227" watchObservedRunningTime="2025-09-30 14:40:48.499208492 +0000 UTC m=+3948.430674809" Sep 30 14:40:48 crc kubenswrapper[4783]: I0930 14:40:48.505163 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jcc8n"] Sep 30 14:40:48 crc kubenswrapper[4783]: I0930 14:40:48.507082 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:48 crc kubenswrapper[4783]: I0930 14:40:48.533046 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jcc8n"] Sep 30 14:40:48 crc kubenswrapper[4783]: I0930 14:40:48.592245 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-utilities\") pod \"certified-operators-jcc8n\" (UID: \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\") " pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:48 crc kubenswrapper[4783]: I0930 14:40:48.592356 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28nsz\" (UniqueName: \"kubernetes.io/projected/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-kube-api-access-28nsz\") pod \"certified-operators-jcc8n\" (UID: \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\") " pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:48 crc kubenswrapper[4783]: I0930 14:40:48.592415 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-catalog-content\") pod \"certified-operators-jcc8n\" (UID: \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\") " pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:48 crc kubenswrapper[4783]: I0930 14:40:48.693244 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-utilities\") pod \"certified-operators-jcc8n\" (UID: \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\") " pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:48 crc kubenswrapper[4783]: I0930 14:40:48.693328 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28nsz\" (UniqueName: \"kubernetes.io/projected/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-kube-api-access-28nsz\") pod \"certified-operators-jcc8n\" (UID: \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\") " pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:48 crc kubenswrapper[4783]: I0930 14:40:48.693385 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-catalog-content\") pod \"certified-operators-jcc8n\" (UID: \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\") " pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:48 crc kubenswrapper[4783]: I0930 14:40:48.693816 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-catalog-content\") pod \"certified-operators-jcc8n\" (UID: \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\") " pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:48 crc kubenswrapper[4783]: I0930 14:40:48.693992 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-utilities\") pod \"certified-operators-jcc8n\" (UID: \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\") " pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:48 crc kubenswrapper[4783]: I0930 14:40:48.714626 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28nsz\" (UniqueName: \"kubernetes.io/projected/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-kube-api-access-28nsz\") pod \"certified-operators-jcc8n\" (UID: \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\") " pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:48 crc kubenswrapper[4783]: I0930 14:40:48.832199 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:49 crc kubenswrapper[4783]: I0930 14:40:49.109486 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jcc8n"] Sep 30 14:40:49 crc kubenswrapper[4783]: I0930 14:40:49.775112 4783 generic.go:334] "Generic (PLEG): container finished" podID="5e9e0dd8-2589-45c2-af8d-8bdf4317e652" containerID="b16227fa01388a4df718af56890998324cfdbc7984362cd308c541ec43b77bc8" exitCode=0 Sep 30 14:40:49 crc kubenswrapper[4783]: I0930 14:40:49.775200 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jcc8n" event={"ID":"5e9e0dd8-2589-45c2-af8d-8bdf4317e652","Type":"ContainerDied","Data":"b16227fa01388a4df718af56890998324cfdbc7984362cd308c541ec43b77bc8"} Sep 30 14:40:49 crc kubenswrapper[4783]: I0930 14:40:49.775423 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jcc8n" event={"ID":"5e9e0dd8-2589-45c2-af8d-8bdf4317e652","Type":"ContainerStarted","Data":"ef72ab0ead023d3f477f8c3c4b78c3de8ca39fc17df27b1d447ed8394c7c75fd"} Sep 30 14:40:50 crc kubenswrapper[4783]: I0930 14:40:50.786104 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jcc8n" event={"ID":"5e9e0dd8-2589-45c2-af8d-8bdf4317e652","Type":"ContainerStarted","Data":"9215acba41edc87b5250fe22999049ec24b4522ce0c4bef695910266b7bec6af"} Sep 30 14:40:50 crc kubenswrapper[4783]: I0930 14:40:50.869009 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:50 crc kubenswrapper[4783]: I0930 14:40:50.869111 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:50 crc kubenswrapper[4783]: I0930 14:40:50.915203 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:51 
crc kubenswrapper[4783]: I0930 14:40:51.799196 4783 generic.go:334] "Generic (PLEG): container finished" podID="5e9e0dd8-2589-45c2-af8d-8bdf4317e652" containerID="9215acba41edc87b5250fe22999049ec24b4522ce0c4bef695910266b7bec6af" exitCode=0 Sep 30 14:40:51 crc kubenswrapper[4783]: I0930 14:40:51.799367 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jcc8n" event={"ID":"5e9e0dd8-2589-45c2-af8d-8bdf4317e652","Type":"ContainerDied","Data":"9215acba41edc87b5250fe22999049ec24b4522ce0c4bef695910266b7bec6af"} Sep 30 14:40:51 crc kubenswrapper[4783]: I0930 14:40:51.861851 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:52 crc kubenswrapper[4783]: I0930 14:40:52.807749 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jcc8n" event={"ID":"5e9e0dd8-2589-45c2-af8d-8bdf4317e652","Type":"ContainerStarted","Data":"f4916871d8f3e43ac06600eb7f80b5ee8d123adc487fd9fc034ffb6b4e74a2c4"} Sep 30 14:40:52 crc kubenswrapper[4783]: I0930 14:40:52.824687 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jcc8n" podStartSLOduration=2.234191681 podStartE2EDuration="4.824666391s" podCreationTimestamp="2025-09-30 14:40:48 +0000 UTC" firstStartedPulling="2025-09-30 14:40:49.786616586 +0000 UTC m=+3949.718082893" lastFinishedPulling="2025-09-30 14:40:52.377091296 +0000 UTC m=+3952.308557603" observedRunningTime="2025-09-30 14:40:52.82150765 +0000 UTC m=+3952.752973957" watchObservedRunningTime="2025-09-30 14:40:52.824666391 +0000 UTC m=+3952.756132688" Sep 30 14:40:52 crc kubenswrapper[4783]: I0930 14:40:52.843209 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:40:52 crc kubenswrapper[4783]: E0930 14:40:52.843463 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:40:53 crc kubenswrapper[4783]: I0930 14:40:53.288139 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v8q8g"] Sep 30 14:40:53 crc kubenswrapper[4783]: I0930 14:40:53.814694 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-v8q8g" podUID="788f6b59-881e-42f7-b0df-0e4671d74c4f" containerName="registry-server" containerID="cri-o://8ec084b1f6c56300dc783c8ad98d3506692d0062ed84896b6ca7558b181f6590" gracePeriod=2 Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.197659 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.274841 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788f6b59-881e-42f7-b0df-0e4671d74c4f-utilities\") pod \"788f6b59-881e-42f7-b0df-0e4671d74c4f\" (UID: \"788f6b59-881e-42f7-b0df-0e4671d74c4f\") " Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.274927 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788f6b59-881e-42f7-b0df-0e4671d74c4f-catalog-content\") pod \"788f6b59-881e-42f7-b0df-0e4671d74c4f\" (UID: \"788f6b59-881e-42f7-b0df-0e4671d74c4f\") " Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.275029 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qpwtw\" (UniqueName: \"kubernetes.io/projected/788f6b59-881e-42f7-b0df-0e4671d74c4f-kube-api-access-qpwtw\") pod \"788f6b59-881e-42f7-b0df-0e4671d74c4f\" (UID: \"788f6b59-881e-42f7-b0df-0e4671d74c4f\") " Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.275619 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/788f6b59-881e-42f7-b0df-0e4671d74c4f-utilities" (OuterVolumeSpecName: "utilities") pod "788f6b59-881e-42f7-b0df-0e4671d74c4f" (UID: "788f6b59-881e-42f7-b0df-0e4671d74c4f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.281395 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/788f6b59-881e-42f7-b0df-0e4671d74c4f-kube-api-access-qpwtw" (OuterVolumeSpecName: "kube-api-access-qpwtw") pod "788f6b59-881e-42f7-b0df-0e4671d74c4f" (UID: "788f6b59-881e-42f7-b0df-0e4671d74c4f"). InnerVolumeSpecName "kube-api-access-qpwtw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.365047 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/788f6b59-881e-42f7-b0df-0e4671d74c4f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "788f6b59-881e-42f7-b0df-0e4671d74c4f" (UID: "788f6b59-881e-42f7-b0df-0e4671d74c4f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.376563 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qpwtw\" (UniqueName: \"kubernetes.io/projected/788f6b59-881e-42f7-b0df-0e4671d74c4f-kube-api-access-qpwtw\") on node \"crc\" DevicePath \"\"" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.376593 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788f6b59-881e-42f7-b0df-0e4671d74c4f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.376604 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788f6b59-881e-42f7-b0df-0e4671d74c4f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.822729 4783 generic.go:334] "Generic (PLEG): container finished" podID="788f6b59-881e-42f7-b0df-0e4671d74c4f" containerID="8ec084b1f6c56300dc783c8ad98d3506692d0062ed84896b6ca7558b181f6590" exitCode=0 Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.822760 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v8q8g" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.822788 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8q8g" event={"ID":"788f6b59-881e-42f7-b0df-0e4671d74c4f","Type":"ContainerDied","Data":"8ec084b1f6c56300dc783c8ad98d3506692d0062ed84896b6ca7558b181f6590"} Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.822832 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8q8g" event={"ID":"788f6b59-881e-42f7-b0df-0e4671d74c4f","Type":"ContainerDied","Data":"5a0384fc2b0f806d649b557d5e2499d31d7d23a1c687021328a595158c024dba"} Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.822876 4783 scope.go:117] "RemoveContainer" containerID="8ec084b1f6c56300dc783c8ad98d3506692d0062ed84896b6ca7558b181f6590" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.844074 4783 scope.go:117] "RemoveContainer" containerID="9b8cdf417245ddefe5853ae04293336c14253791b20d3dea52e49853ebf09b71" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.857589 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v8q8g"] Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.857631 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-v8q8g"] Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.868205 4783 scope.go:117] "RemoveContainer" containerID="368a20bb30b23488ea7877d76ef5aa63b2ac68f348dd5bc9d58ef9f1d9cecb0b" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.884582 4783 scope.go:117] "RemoveContainer" containerID="8ec084b1f6c56300dc783c8ad98d3506692d0062ed84896b6ca7558b181f6590" Sep 30 14:40:54 crc kubenswrapper[4783]: E0930 14:40:54.885211 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ec084b1f6c56300dc783c8ad98d3506692d0062ed84896b6ca7558b181f6590\": container with ID starting with 8ec084b1f6c56300dc783c8ad98d3506692d0062ed84896b6ca7558b181f6590 not found: ID does not exist" containerID="8ec084b1f6c56300dc783c8ad98d3506692d0062ed84896b6ca7558b181f6590" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.885288 4783 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ec084b1f6c56300dc783c8ad98d3506692d0062ed84896b6ca7558b181f6590"} err="failed to get container status \"8ec084b1f6c56300dc783c8ad98d3506692d0062ed84896b6ca7558b181f6590\": rpc error: code = NotFound desc = could not find container \"8ec084b1f6c56300dc783c8ad98d3506692d0062ed84896b6ca7558b181f6590\": container with ID starting with 8ec084b1f6c56300dc783c8ad98d3506692d0062ed84896b6ca7558b181f6590 not found: ID does not exist" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.885320 4783 scope.go:117] "RemoveContainer" containerID="9b8cdf417245ddefe5853ae04293336c14253791b20d3dea52e49853ebf09b71" Sep 30 14:40:54 crc kubenswrapper[4783]: E0930 14:40:54.885652 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b8cdf417245ddefe5853ae04293336c14253791b20d3dea52e49853ebf09b71\": container with ID starting with 9b8cdf417245ddefe5853ae04293336c14253791b20d3dea52e49853ebf09b71 not found: ID does not exist" containerID="9b8cdf417245ddefe5853ae04293336c14253791b20d3dea52e49853ebf09b71" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.885747 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b8cdf417245ddefe5853ae04293336c14253791b20d3dea52e49853ebf09b71"} err="failed to get container status \"9b8cdf417245ddefe5853ae04293336c14253791b20d3dea52e49853ebf09b71\": rpc error: code = NotFound desc = could not find container \"9b8cdf417245ddefe5853ae04293336c14253791b20d3dea52e49853ebf09b71\": container with ID starting with 9b8cdf417245ddefe5853ae04293336c14253791b20d3dea52e49853ebf09b71 not found: ID does not exist" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.885841 4783 scope.go:117] "RemoveContainer" containerID="368a20bb30b23488ea7877d76ef5aa63b2ac68f348dd5bc9d58ef9f1d9cecb0b" Sep 30 14:40:54 crc kubenswrapper[4783]: E0930 14:40:54.886154 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"368a20bb30b23488ea7877d76ef5aa63b2ac68f348dd5bc9d58ef9f1d9cecb0b\": container with ID starting with 368a20bb30b23488ea7877d76ef5aa63b2ac68f348dd5bc9d58ef9f1d9cecb0b not found: ID does not exist" containerID="368a20bb30b23488ea7877d76ef5aa63b2ac68f348dd5bc9d58ef9f1d9cecb0b" Sep 30 14:40:54 crc kubenswrapper[4783]: I0930 14:40:54.886177 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"368a20bb30b23488ea7877d76ef5aa63b2ac68f348dd5bc9d58ef9f1d9cecb0b"} err="failed to get container status \"368a20bb30b23488ea7877d76ef5aa63b2ac68f348dd5bc9d58ef9f1d9cecb0b\": rpc error: code = NotFound desc = could not find container \"368a20bb30b23488ea7877d76ef5aa63b2ac68f348dd5bc9d58ef9f1d9cecb0b\": container with ID starting with 368a20bb30b23488ea7877d76ef5aa63b2ac68f348dd5bc9d58ef9f1d9cecb0b not found: ID does not exist" Sep 30 14:40:56 crc kubenswrapper[4783]: I0930 14:40:56.852025 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="788f6b59-881e-42f7-b0df-0e4671d74c4f" path="/var/lib/kubelet/pods/788f6b59-881e-42f7-b0df-0e4671d74c4f/volumes" Sep 30 14:40:58 crc kubenswrapper[4783]: I0930 14:40:58.832846 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:58 crc kubenswrapper[4783]: I0930 14:40:58.832906 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:58 crc kubenswrapper[4783]: I0930 14:40:58.899697 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:58 crc kubenswrapper[4783]: I0930 14:40:58.942646 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:40:59 crc kubenswrapper[4783]: I0930 14:40:59.157797 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jcc8n"] Sep 30 14:41:00 crc kubenswrapper[4783]: I0930 14:41:00.876154 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jcc8n" podUID="5e9e0dd8-2589-45c2-af8d-8bdf4317e652" containerName="registry-server" containerID="cri-o://f4916871d8f3e43ac06600eb7f80b5ee8d123adc487fd9fc034ffb6b4e74a2c4" gracePeriod=2 Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.253145 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.275737 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-catalog-content\") pod \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\" (UID: \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\") " Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.275815 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28nsz\" (UniqueName: \"kubernetes.io/projected/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-kube-api-access-28nsz\") pod \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\" (UID: \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\") " Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.275848 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-utilities\") pod \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\" (UID: \"5e9e0dd8-2589-45c2-af8d-8bdf4317e652\") " Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.277251 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-utilities" (OuterVolumeSpecName: "utilities") pod "5e9e0dd8-2589-45c2-af8d-8bdf4317e652" (UID: "5e9e0dd8-2589-45c2-af8d-8bdf4317e652"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.286424 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-kube-api-access-28nsz" (OuterVolumeSpecName: "kube-api-access-28nsz") pod "5e9e0dd8-2589-45c2-af8d-8bdf4317e652" (UID: "5e9e0dd8-2589-45c2-af8d-8bdf4317e652"). InnerVolumeSpecName "kube-api-access-28nsz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.377585 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28nsz\" (UniqueName: \"kubernetes.io/projected/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-kube-api-access-28nsz\") on node \"crc\" DevicePath \"\"" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.377620 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.424731 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5e9e0dd8-2589-45c2-af8d-8bdf4317e652" (UID: "5e9e0dd8-2589-45c2-af8d-8bdf4317e652"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.479311 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e9e0dd8-2589-45c2-af8d-8bdf4317e652-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.890413 4783 generic.go:334] "Generic (PLEG): container finished" podID="5e9e0dd8-2589-45c2-af8d-8bdf4317e652" containerID="f4916871d8f3e43ac06600eb7f80b5ee8d123adc487fd9fc034ffb6b4e74a2c4" exitCode=0 Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.890497 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jcc8n" event={"ID":"5e9e0dd8-2589-45c2-af8d-8bdf4317e652","Type":"ContainerDied","Data":"f4916871d8f3e43ac06600eb7f80b5ee8d123adc487fd9fc034ffb6b4e74a2c4"} Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.890516 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jcc8n" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.890560 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jcc8n" event={"ID":"5e9e0dd8-2589-45c2-af8d-8bdf4317e652","Type":"ContainerDied","Data":"ef72ab0ead023d3f477f8c3c4b78c3de8ca39fc17df27b1d447ed8394c7c75fd"} Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.890597 4783 scope.go:117] "RemoveContainer" containerID="f4916871d8f3e43ac06600eb7f80b5ee8d123adc487fd9fc034ffb6b4e74a2c4" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.927176 4783 scope.go:117] "RemoveContainer" containerID="9215acba41edc87b5250fe22999049ec24b4522ce0c4bef695910266b7bec6af" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.963296 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jcc8n"] Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.967624 4783 scope.go:117] "RemoveContainer" containerID="b16227fa01388a4df718af56890998324cfdbc7984362cd308c541ec43b77bc8" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.972516 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jcc8n"] Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.992840 4783 scope.go:117] "RemoveContainer" containerID="f4916871d8f3e43ac06600eb7f80b5ee8d123adc487fd9fc034ffb6b4e74a2c4" Sep 30 14:41:01 crc kubenswrapper[4783]: E0930 14:41:01.993243 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4916871d8f3e43ac06600eb7f80b5ee8d123adc487fd9fc034ffb6b4e74a2c4\": container with ID starting with f4916871d8f3e43ac06600eb7f80b5ee8d123adc487fd9fc034ffb6b4e74a2c4 not found: ID does not exist" containerID="f4916871d8f3e43ac06600eb7f80b5ee8d123adc487fd9fc034ffb6b4e74a2c4" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.993277 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4916871d8f3e43ac06600eb7f80b5ee8d123adc487fd9fc034ffb6b4e74a2c4"} err="failed to get container status \"f4916871d8f3e43ac06600eb7f80b5ee8d123adc487fd9fc034ffb6b4e74a2c4\": rpc error: code = NotFound desc = could not find container \"f4916871d8f3e43ac06600eb7f80b5ee8d123adc487fd9fc034ffb6b4e74a2c4\": container with ID starting with f4916871d8f3e43ac06600eb7f80b5ee8d123adc487fd9fc034ffb6b4e74a2c4 not found: ID does not exist" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.993300 4783 scope.go:117] "RemoveContainer" containerID="9215acba41edc87b5250fe22999049ec24b4522ce0c4bef695910266b7bec6af" Sep 30 14:41:01 crc kubenswrapper[4783]: E0930 14:41:01.993543 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9215acba41edc87b5250fe22999049ec24b4522ce0c4bef695910266b7bec6af\": container with ID starting with 9215acba41edc87b5250fe22999049ec24b4522ce0c4bef695910266b7bec6af not found: ID does not exist" containerID="9215acba41edc87b5250fe22999049ec24b4522ce0c4bef695910266b7bec6af" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.993589 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9215acba41edc87b5250fe22999049ec24b4522ce0c4bef695910266b7bec6af"} err="failed to get container status \"9215acba41edc87b5250fe22999049ec24b4522ce0c4bef695910266b7bec6af\": rpc error: code = NotFound desc = could not find 
container \"9215acba41edc87b5250fe22999049ec24b4522ce0c4bef695910266b7bec6af\": container with ID starting with 9215acba41edc87b5250fe22999049ec24b4522ce0c4bef695910266b7bec6af not found: ID does not exist" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.993611 4783 scope.go:117] "RemoveContainer" containerID="b16227fa01388a4df718af56890998324cfdbc7984362cd308c541ec43b77bc8" Sep 30 14:41:01 crc kubenswrapper[4783]: E0930 14:41:01.993914 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b16227fa01388a4df718af56890998324cfdbc7984362cd308c541ec43b77bc8\": container with ID starting with b16227fa01388a4df718af56890998324cfdbc7984362cd308c541ec43b77bc8 not found: ID does not exist" containerID="b16227fa01388a4df718af56890998324cfdbc7984362cd308c541ec43b77bc8" Sep 30 14:41:01 crc kubenswrapper[4783]: I0930 14:41:01.993957 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b16227fa01388a4df718af56890998324cfdbc7984362cd308c541ec43b77bc8"} err="failed to get container status \"b16227fa01388a4df718af56890998324cfdbc7984362cd308c541ec43b77bc8\": rpc error: code = NotFound desc = could not find container \"b16227fa01388a4df718af56890998324cfdbc7984362cd308c541ec43b77bc8\": container with ID starting with b16227fa01388a4df718af56890998324cfdbc7984362cd308c541ec43b77bc8 not found: ID does not exist" Sep 30 14:41:02 crc kubenswrapper[4783]: I0930 14:41:02.874873 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e9e0dd8-2589-45c2-af8d-8bdf4317e652" path="/var/lib/kubelet/pods/5e9e0dd8-2589-45c2-af8d-8bdf4317e652/volumes" Sep 30 14:41:06 crc kubenswrapper[4783]: I0930 14:41:06.843304 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:41:06 crc kubenswrapper[4783]: E0930 14:41:06.845203 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:41:17 crc kubenswrapper[4783]: I0930 14:41:17.844048 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:41:17 crc kubenswrapper[4783]: E0930 14:41:17.845158 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:41:30 crc kubenswrapper[4783]: I0930 14:41:30.852262 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:41:30 crc kubenswrapper[4783]: E0930 14:41:30.852972 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:41:43 crc kubenswrapper[4783]: I0930 14:41:43.843949 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:41:44 crc kubenswrapper[4783]: I0930 14:41:44.302637 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"e247d70f2ad435805d4e436da73c2d3075b430b00f2273ddfdf8ad883f33a1da"} Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.540482 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b4t2q"] Sep 30 14:43:54 crc kubenswrapper[4783]: E0930 14:43:54.547136 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e9e0dd8-2589-45c2-af8d-8bdf4317e652" containerName="registry-server" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.547565 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e9e0dd8-2589-45c2-af8d-8bdf4317e652" containerName="registry-server" Sep 30 14:43:54 crc kubenswrapper[4783]: E0930 14:43:54.547735 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788f6b59-881e-42f7-b0df-0e4671d74c4f" containerName="registry-server" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.547867 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="788f6b59-881e-42f7-b0df-0e4671d74c4f" containerName="registry-server" Sep 30 14:43:54 crc kubenswrapper[4783]: E0930 14:43:54.548005 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e9e0dd8-2589-45c2-af8d-8bdf4317e652" containerName="extract-utilities" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.548134 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e9e0dd8-2589-45c2-af8d-8bdf4317e652" containerName="extract-utilities" Sep 30 14:43:54 crc kubenswrapper[4783]: E0930 14:43:54.548340 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e9e0dd8-2589-45c2-af8d-8bdf4317e652" containerName="extract-content" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.548525 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e9e0dd8-2589-45c2-af8d-8bdf4317e652" containerName="extract-content" Sep 30 14:43:54 crc kubenswrapper[4783]: E0930 14:43:54.548741 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788f6b59-881e-42f7-b0df-0e4671d74c4f" containerName="extract-utilities" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.548904 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="788f6b59-881e-42f7-b0df-0e4671d74c4f" containerName="extract-utilities" Sep 30 14:43:54 crc kubenswrapper[4783]: E0930 14:43:54.549037 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788f6b59-881e-42f7-b0df-0e4671d74c4f" containerName="extract-content" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.549160 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="788f6b59-881e-42f7-b0df-0e4671d74c4f" containerName="extract-content" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.549691 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e9e0dd8-2589-45c2-af8d-8bdf4317e652" containerName="registry-server" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 
14:43:54.549877 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="788f6b59-881e-42f7-b0df-0e4671d74c4f" containerName="registry-server" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.552104 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.574802 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b4t2q"] Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.586160 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzjbq\" (UniqueName: \"kubernetes.io/projected/2ef7d287-2c43-47e8-90b4-da2af1e40b84-kube-api-access-vzjbq\") pod \"community-operators-b4t2q\" (UID: \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\") " pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.586280 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ef7d287-2c43-47e8-90b4-da2af1e40b84-utilities\") pod \"community-operators-b4t2q\" (UID: \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\") " pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.586381 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef7d287-2c43-47e8-90b4-da2af1e40b84-catalog-content\") pod \"community-operators-b4t2q\" (UID: \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\") " pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.687614 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef7d287-2c43-47e8-90b4-da2af1e40b84-catalog-content\") pod \"community-operators-b4t2q\" (UID: \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\") " pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.687723 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzjbq\" (UniqueName: \"kubernetes.io/projected/2ef7d287-2c43-47e8-90b4-da2af1e40b84-kube-api-access-vzjbq\") pod \"community-operators-b4t2q\" (UID: \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\") " pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.687758 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ef7d287-2c43-47e8-90b4-da2af1e40b84-utilities\") pod \"community-operators-b4t2q\" (UID: \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\") " pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.688427 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef7d287-2c43-47e8-90b4-da2af1e40b84-catalog-content\") pod \"community-operators-b4t2q\" (UID: \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\") " pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.688438 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/2ef7d287-2c43-47e8-90b4-da2af1e40b84-utilities\") pod \"community-operators-b4t2q\" (UID: \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\") " pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.709440 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzjbq\" (UniqueName: \"kubernetes.io/projected/2ef7d287-2c43-47e8-90b4-da2af1e40b84-kube-api-access-vzjbq\") pod \"community-operators-b4t2q\" (UID: \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\") " pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:43:54 crc kubenswrapper[4783]: I0930 14:43:54.890389 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:43:55 crc kubenswrapper[4783]: I0930 14:43:55.186838 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b4t2q"] Sep 30 14:43:55 crc kubenswrapper[4783]: I0930 14:43:55.515078 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b4t2q" event={"ID":"2ef7d287-2c43-47e8-90b4-da2af1e40b84","Type":"ContainerStarted","Data":"943d43152a3b28c3a9ad709a51167d12ae188f6cd35aaac19a5c315649afa970"} Sep 30 14:43:56 crc kubenswrapper[4783]: I0930 14:43:56.526612 4783 generic.go:334] "Generic (PLEG): container finished" podID="2ef7d287-2c43-47e8-90b4-da2af1e40b84" containerID="0a2da62abf6f1ae87dbf4684c7cb508ad257ce9ef91231bfd605004e0325a0f0" exitCode=0 Sep 30 14:43:56 crc kubenswrapper[4783]: I0930 14:43:56.526712 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b4t2q" event={"ID":"2ef7d287-2c43-47e8-90b4-da2af1e40b84","Type":"ContainerDied","Data":"0a2da62abf6f1ae87dbf4684c7cb508ad257ce9ef91231bfd605004e0325a0f0"} Sep 30 14:43:58 crc kubenswrapper[4783]: I0930 14:43:58.569594 4783 generic.go:334] "Generic (PLEG): container finished" podID="2ef7d287-2c43-47e8-90b4-da2af1e40b84" containerID="75c4981b060fc221e201e58878fde4d8c479d6a37a35beaef04d7e5e91b202c5" exitCode=0 Sep 30 14:43:58 crc kubenswrapper[4783]: I0930 14:43:58.569632 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b4t2q" event={"ID":"2ef7d287-2c43-47e8-90b4-da2af1e40b84","Type":"ContainerDied","Data":"75c4981b060fc221e201e58878fde4d8c479d6a37a35beaef04d7e5e91b202c5"} Sep 30 14:43:59 crc kubenswrapper[4783]: I0930 14:43:59.582131 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b4t2q" event={"ID":"2ef7d287-2c43-47e8-90b4-da2af1e40b84","Type":"ContainerStarted","Data":"4931836af163bf1ffd49480b8900033a923bddd4c15caab14738dac28c63bad3"} Sep 30 14:43:59 crc kubenswrapper[4783]: I0930 14:43:59.607878 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b4t2q" podStartSLOduration=3.042311739 podStartE2EDuration="5.607855581s" podCreationTimestamp="2025-09-30 14:43:54 +0000 UTC" firstStartedPulling="2025-09-30 14:43:56.529186076 +0000 UTC m=+4136.460652383" lastFinishedPulling="2025-09-30 14:43:59.094729918 +0000 UTC m=+4139.026196225" observedRunningTime="2025-09-30 14:43:59.602533981 +0000 UTC m=+4139.534000298" watchObservedRunningTime="2025-09-30 14:43:59.607855581 +0000 UTC m=+4139.539321888" Sep 30 14:44:04 crc kubenswrapper[4783]: I0930 14:44:04.891006 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:44:04 crc kubenswrapper[4783]: I0930 14:44:04.891574 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:44:04 crc kubenswrapper[4783]: I0930 14:44:04.939142 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:44:05 crc kubenswrapper[4783]: I0930 14:44:05.696162 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:44:05 crc kubenswrapper[4783]: I0930 14:44:05.770020 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b4t2q"] Sep 30 14:44:07 crc kubenswrapper[4783]: I0930 14:44:07.650087 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-b4t2q" podUID="2ef7d287-2c43-47e8-90b4-da2af1e40b84" containerName="registry-server" containerID="cri-o://4931836af163bf1ffd49480b8900033a923bddd4c15caab14738dac28c63bad3" gracePeriod=2 Sep 30 14:44:07 crc kubenswrapper[4783]: I0930 14:44:07.674394 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:44:07 crc kubenswrapper[4783]: I0930 14:44:07.674461 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:44:08 crc kubenswrapper[4783]: I0930 14:44:08.659628 4783 generic.go:334] "Generic (PLEG): container finished" podID="2ef7d287-2c43-47e8-90b4-da2af1e40b84" containerID="4931836af163bf1ffd49480b8900033a923bddd4c15caab14738dac28c63bad3" exitCode=0 Sep 30 14:44:08 crc kubenswrapper[4783]: I0930 14:44:08.659722 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b4t2q" event={"ID":"2ef7d287-2c43-47e8-90b4-da2af1e40b84","Type":"ContainerDied","Data":"4931836af163bf1ffd49480b8900033a923bddd4c15caab14738dac28c63bad3"} Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.018248 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.129889 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ef7d287-2c43-47e8-90b4-da2af1e40b84-utilities\") pod \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\" (UID: \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\") " Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.130281 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzjbq\" (UniqueName: \"kubernetes.io/projected/2ef7d287-2c43-47e8-90b4-da2af1e40b84-kube-api-access-vzjbq\") pod \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\" (UID: \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\") " Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.130395 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef7d287-2c43-47e8-90b4-da2af1e40b84-catalog-content\") pod \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\" (UID: \"2ef7d287-2c43-47e8-90b4-da2af1e40b84\") " Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.130741 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ef7d287-2c43-47e8-90b4-da2af1e40b84-utilities" (OuterVolumeSpecName: "utilities") pod "2ef7d287-2c43-47e8-90b4-da2af1e40b84" (UID: "2ef7d287-2c43-47e8-90b4-da2af1e40b84"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.135519 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ef7d287-2c43-47e8-90b4-da2af1e40b84-kube-api-access-vzjbq" (OuterVolumeSpecName: "kube-api-access-vzjbq") pod "2ef7d287-2c43-47e8-90b4-da2af1e40b84" (UID: "2ef7d287-2c43-47e8-90b4-da2af1e40b84"). InnerVolumeSpecName "kube-api-access-vzjbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.180956 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ef7d287-2c43-47e8-90b4-da2af1e40b84-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2ef7d287-2c43-47e8-90b4-da2af1e40b84" (UID: "2ef7d287-2c43-47e8-90b4-da2af1e40b84"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.232206 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzjbq\" (UniqueName: \"kubernetes.io/projected/2ef7d287-2c43-47e8-90b4-da2af1e40b84-kube-api-access-vzjbq\") on node \"crc\" DevicePath \"\"" Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.232271 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef7d287-2c43-47e8-90b4-da2af1e40b84-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.232282 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ef7d287-2c43-47e8-90b4-da2af1e40b84-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.671923 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b4t2q" event={"ID":"2ef7d287-2c43-47e8-90b4-da2af1e40b84","Type":"ContainerDied","Data":"943d43152a3b28c3a9ad709a51167d12ae188f6cd35aaac19a5c315649afa970"} Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.671992 4783 scope.go:117] "RemoveContainer" containerID="4931836af163bf1ffd49480b8900033a923bddd4c15caab14738dac28c63bad3" Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.672002 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b4t2q" Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.698450 4783 scope.go:117] "RemoveContainer" containerID="75c4981b060fc221e201e58878fde4d8c479d6a37a35beaef04d7e5e91b202c5" Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.730321 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b4t2q"] Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.733103 4783 scope.go:117] "RemoveContainer" containerID="0a2da62abf6f1ae87dbf4684c7cb508ad257ce9ef91231bfd605004e0325a0f0" Sep 30 14:44:09 crc kubenswrapper[4783]: I0930 14:44:09.741471 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-b4t2q"] Sep 30 14:44:10 crc kubenswrapper[4783]: I0930 14:44:10.859373 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ef7d287-2c43-47e8-90b4-da2af1e40b84" path="/var/lib/kubelet/pods/2ef7d287-2c43-47e8-90b4-da2af1e40b84/volumes" Sep 30 14:44:37 crc kubenswrapper[4783]: I0930 14:44:37.674076 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:44:37 crc kubenswrapper[4783]: I0930 14:44:37.674634 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.168260 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z"] Sep 30 14:45:00 crc kubenswrapper[4783]: E0930 14:45:00.169322 4783 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="2ef7d287-2c43-47e8-90b4-da2af1e40b84" containerName="extract-utilities" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.169348 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ef7d287-2c43-47e8-90b4-da2af1e40b84" containerName="extract-utilities" Sep 30 14:45:00 crc kubenswrapper[4783]: E0930 14:45:00.169363 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ef7d287-2c43-47e8-90b4-da2af1e40b84" containerName="extract-content" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.169375 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ef7d287-2c43-47e8-90b4-da2af1e40b84" containerName="extract-content" Sep 30 14:45:00 crc kubenswrapper[4783]: E0930 14:45:00.169423 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ef7d287-2c43-47e8-90b4-da2af1e40b84" containerName="registry-server" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.169435 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ef7d287-2c43-47e8-90b4-da2af1e40b84" containerName="registry-server" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.169674 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ef7d287-2c43-47e8-90b4-da2af1e40b84" containerName="registry-server" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.170417 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.172455 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.176086 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.179325 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z"] Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.294910 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgqz5\" (UniqueName: \"kubernetes.io/projected/9f943991-a019-456a-baad-94f5e8104f00-kube-api-access-cgqz5\") pod \"collect-profiles-29320725-wgj8z\" (UID: \"9f943991-a019-456a-baad-94f5e8104f00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.294996 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9f943991-a019-456a-baad-94f5e8104f00-config-volume\") pod \"collect-profiles-29320725-wgj8z\" (UID: \"9f943991-a019-456a-baad-94f5e8104f00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.295016 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9f943991-a019-456a-baad-94f5e8104f00-secret-volume\") pod \"collect-profiles-29320725-wgj8z\" (UID: \"9f943991-a019-456a-baad-94f5e8104f00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.396836 4783 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-cgqz5\" (UniqueName: \"kubernetes.io/projected/9f943991-a019-456a-baad-94f5e8104f00-kube-api-access-cgqz5\") pod \"collect-profiles-29320725-wgj8z\" (UID: \"9f943991-a019-456a-baad-94f5e8104f00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.397327 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9f943991-a019-456a-baad-94f5e8104f00-config-volume\") pod \"collect-profiles-29320725-wgj8z\" (UID: \"9f943991-a019-456a-baad-94f5e8104f00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.397585 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9f943991-a019-456a-baad-94f5e8104f00-secret-volume\") pod \"collect-profiles-29320725-wgj8z\" (UID: \"9f943991-a019-456a-baad-94f5e8104f00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.399056 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9f943991-a019-456a-baad-94f5e8104f00-config-volume\") pod \"collect-profiles-29320725-wgj8z\" (UID: \"9f943991-a019-456a-baad-94f5e8104f00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.408856 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9f943991-a019-456a-baad-94f5e8104f00-secret-volume\") pod \"collect-profiles-29320725-wgj8z\" (UID: \"9f943991-a019-456a-baad-94f5e8104f00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.434580 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgqz5\" (UniqueName: \"kubernetes.io/projected/9f943991-a019-456a-baad-94f5e8104f00-kube-api-access-cgqz5\") pod \"collect-profiles-29320725-wgj8z\" (UID: \"9f943991-a019-456a-baad-94f5e8104f00\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.494133 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" Sep 30 14:45:00 crc kubenswrapper[4783]: I0930 14:45:00.739777 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z"] Sep 30 14:45:01 crc kubenswrapper[4783]: I0930 14:45:01.129253 4783 generic.go:334] "Generic (PLEG): container finished" podID="9f943991-a019-456a-baad-94f5e8104f00" containerID="304bede3cc3ed742d444cf276db49a46d4fc5d4847624a40ad253b2f3a1d8867" exitCode=0 Sep 30 14:45:01 crc kubenswrapper[4783]: I0930 14:45:01.129344 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" event={"ID":"9f943991-a019-456a-baad-94f5e8104f00","Type":"ContainerDied","Data":"304bede3cc3ed742d444cf276db49a46d4fc5d4847624a40ad253b2f3a1d8867"} Sep 30 14:45:01 crc kubenswrapper[4783]: I0930 14:45:01.129723 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" event={"ID":"9f943991-a019-456a-baad-94f5e8104f00","Type":"ContainerStarted","Data":"a473f6ff3821edfd250367a6b841171f017c3a7398abf47fed43ce3b0619ed60"} Sep 30 14:45:02 crc kubenswrapper[4783]: I0930 14:45:02.476164 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" Sep 30 14:45:02 crc kubenswrapper[4783]: I0930 14:45:02.634129 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9f943991-a019-456a-baad-94f5e8104f00-config-volume\") pod \"9f943991-a019-456a-baad-94f5e8104f00\" (UID: \"9f943991-a019-456a-baad-94f5e8104f00\") " Sep 30 14:45:02 crc kubenswrapper[4783]: I0930 14:45:02.634500 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9f943991-a019-456a-baad-94f5e8104f00-secret-volume\") pod \"9f943991-a019-456a-baad-94f5e8104f00\" (UID: \"9f943991-a019-456a-baad-94f5e8104f00\") " Sep 30 14:45:02 crc kubenswrapper[4783]: I0930 14:45:02.634664 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cgqz5\" (UniqueName: \"kubernetes.io/projected/9f943991-a019-456a-baad-94f5e8104f00-kube-api-access-cgqz5\") pod \"9f943991-a019-456a-baad-94f5e8104f00\" (UID: \"9f943991-a019-456a-baad-94f5e8104f00\") " Sep 30 14:45:02 crc kubenswrapper[4783]: I0930 14:45:02.635091 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f943991-a019-456a-baad-94f5e8104f00-config-volume" (OuterVolumeSpecName: "config-volume") pod "9f943991-a019-456a-baad-94f5e8104f00" (UID: "9f943991-a019-456a-baad-94f5e8104f00"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:45:02 crc kubenswrapper[4783]: I0930 14:45:02.635908 4783 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9f943991-a019-456a-baad-94f5e8104f00-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 14:45:02 crc kubenswrapper[4783]: I0930 14:45:02.666490 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f943991-a019-456a-baad-94f5e8104f00-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9f943991-a019-456a-baad-94f5e8104f00" (UID: "9f943991-a019-456a-baad-94f5e8104f00"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 14:45:02 crc kubenswrapper[4783]: I0930 14:45:02.666618 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f943991-a019-456a-baad-94f5e8104f00-kube-api-access-cgqz5" (OuterVolumeSpecName: "kube-api-access-cgqz5") pod "9f943991-a019-456a-baad-94f5e8104f00" (UID: "9f943991-a019-456a-baad-94f5e8104f00"). InnerVolumeSpecName "kube-api-access-cgqz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:45:02 crc kubenswrapper[4783]: I0930 14:45:02.737385 4783 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9f943991-a019-456a-baad-94f5e8104f00-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 14:45:02 crc kubenswrapper[4783]: I0930 14:45:02.737429 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cgqz5\" (UniqueName: \"kubernetes.io/projected/9f943991-a019-456a-baad-94f5e8104f00-kube-api-access-cgqz5\") on node \"crc\" DevicePath \"\"" Sep 30 14:45:03 crc kubenswrapper[4783]: I0930 14:45:03.151288 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z" event={"ID":"9f943991-a019-456a-baad-94f5e8104f00","Type":"ContainerDied","Data":"a473f6ff3821edfd250367a6b841171f017c3a7398abf47fed43ce3b0619ed60"} Sep 30 14:45:03 crc kubenswrapper[4783]: I0930 14:45:03.151326 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a473f6ff3821edfd250367a6b841171f017c3a7398abf47fed43ce3b0619ed60" Sep 30 14:45:03 crc kubenswrapper[4783]: I0930 14:45:03.151333 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320725-wgj8z"
Sep 30 14:45:03 crc kubenswrapper[4783]: I0930 14:45:03.559093 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s"]
Sep 30 14:45:03 crc kubenswrapper[4783]: I0930 14:45:03.569078 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320680-w457s"]
Sep 30 14:45:04 crc kubenswrapper[4783]: I0930 14:45:04.857628 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68e75c21-2820-4856-8e09-f353a6f0661a" path="/var/lib/kubelet/pods/68e75c21-2820-4856-8e09-f353a6f0661a/volumes"
Sep 30 14:45:07 crc kubenswrapper[4783]: I0930 14:45:07.673899 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 14:45:07 crc kubenswrapper[4783]: I0930 14:45:07.673994 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 14:45:07 crc kubenswrapper[4783]: I0930 14:45:07.674061 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf"
Sep 30 14:45:07 crc kubenswrapper[4783]: I0930 14:45:07.675112 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e247d70f2ad435805d4e436da73c2d3075b430b00f2273ddfdf8ad883f33a1da"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 14:45:07 crc kubenswrapper[4783]: I0930 14:45:07.675210 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://e247d70f2ad435805d4e436da73c2d3075b430b00f2273ddfdf8ad883f33a1da" gracePeriod=600
Sep 30 14:45:08 crc kubenswrapper[4783]: I0930 14:45:08.190788 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="e247d70f2ad435805d4e436da73c2d3075b430b00f2273ddfdf8ad883f33a1da" exitCode=0
Sep 30 14:45:08 crc kubenswrapper[4783]: I0930 14:45:08.190878 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"e247d70f2ad435805d4e436da73c2d3075b430b00f2273ddfdf8ad883f33a1da"}
Sep 30 14:45:08 crc kubenswrapper[4783]: I0930 14:45:08.191144 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587"}
Sep 30 14:45:08 crc kubenswrapper[4783]: I0930 14:45:08.191173 4783 scope.go:117] "RemoveContainer" containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907"
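
The repeated liveness failures above are plain HTTP GET probes against http://127.0.0.1:8798/health, where a refused TCP connection counts as a failure. A minimal sketch of that check, assuming only the URL from the log and the kubelet convention that 2xx/3xx responses pass (probeOnce is an illustrative name, not kubelet's code):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // probeOnce performs one HTTP liveness check; any transport error
    // (e.g. "connect: connection refused") or a non-2xx/3xx status fails it.
    func probeOnce(url string) error {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get(url)
        if err != nil {
            return fmt.Errorf("probe failed: %w", err)
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("probe failed: status %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        if err := probeOnce("http://127.0.0.1:8798/health"); err != nil {
            fmt.Println(err) // matches the output= field in the entries above
        }
    }
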
containerID="7539293ba3d67837d7ef0be48f1318911d9659ec49acd70f49d01b45a435e907" Sep 30 14:45:18 crc kubenswrapper[4783]: I0930 14:45:18.390141 4783 scope.go:117] "RemoveContainer" containerID="a4f40b91e052d74d21f2dc438123afc52774fed053a4b215f1f46720fa8edbc1" Sep 30 14:47:37 crc kubenswrapper[4783]: I0930 14:47:37.673979 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:47:37 crc kubenswrapper[4783]: I0930 14:47:37.674700 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:48:07 crc kubenswrapper[4783]: I0930 14:48:07.673583 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:48:07 crc kubenswrapper[4783]: I0930 14:48:07.674415 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.070753 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xqf7d"] Sep 30 14:48:10 crc kubenswrapper[4783]: E0930 14:48:10.071346 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f943991-a019-456a-baad-94f5e8104f00" containerName="collect-profiles" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.071357 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f943991-a019-456a-baad-94f5e8104f00" containerName="collect-profiles" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.071506 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f943991-a019-456a-baad-94f5e8104f00" containerName="collect-profiles" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.074040 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.092153 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xqf7d"] Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.178313 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9xsk\" (UniqueName: \"kubernetes.io/projected/47418860-de09-49b5-8c39-13975395db73-kube-api-access-x9xsk\") pod \"redhat-marketplace-xqf7d\" (UID: \"47418860-de09-49b5-8c39-13975395db73\") " pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.178408 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47418860-de09-49b5-8c39-13975395db73-utilities\") pod \"redhat-marketplace-xqf7d\" (UID: \"47418860-de09-49b5-8c39-13975395db73\") " pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.178482 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47418860-de09-49b5-8c39-13975395db73-catalog-content\") pod \"redhat-marketplace-xqf7d\" (UID: \"47418860-de09-49b5-8c39-13975395db73\") " pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.279953 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47418860-de09-49b5-8c39-13975395db73-utilities\") pod \"redhat-marketplace-xqf7d\" (UID: \"47418860-de09-49b5-8c39-13975395db73\") " pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.280071 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47418860-de09-49b5-8c39-13975395db73-catalog-content\") pod \"redhat-marketplace-xqf7d\" (UID: \"47418860-de09-49b5-8c39-13975395db73\") " pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.280125 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9xsk\" (UniqueName: \"kubernetes.io/projected/47418860-de09-49b5-8c39-13975395db73-kube-api-access-x9xsk\") pod \"redhat-marketplace-xqf7d\" (UID: \"47418860-de09-49b5-8c39-13975395db73\") " pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.280517 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47418860-de09-49b5-8c39-13975395db73-utilities\") pod \"redhat-marketplace-xqf7d\" (UID: \"47418860-de09-49b5-8c39-13975395db73\") " pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.281027 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47418860-de09-49b5-8c39-13975395db73-catalog-content\") pod \"redhat-marketplace-xqf7d\" (UID: \"47418860-de09-49b5-8c39-13975395db73\") " pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.314042 4783 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-x9xsk\" (UniqueName: \"kubernetes.io/projected/47418860-de09-49b5-8c39-13975395db73-kube-api-access-x9xsk\") pod \"redhat-marketplace-xqf7d\" (UID: \"47418860-de09-49b5-8c39-13975395db73\") " pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.418132 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:10 crc kubenswrapper[4783]: I0930 14:48:10.783802 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xqf7d"] Sep 30 14:48:11 crc kubenswrapper[4783]: E0930 14:48:11.031118 4783 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod47418860_de09_49b5_8c39_13975395db73.slice/crio-conmon-43415eac77c9b5d5b0544711a01a1e2413586b1e5b160927cdaf701143c0cc50.scope\": RecentStats: unable to find data in memory cache]" Sep 30 14:48:11 crc kubenswrapper[4783]: I0930 14:48:11.797674 4783 generic.go:334] "Generic (PLEG): container finished" podID="47418860-de09-49b5-8c39-13975395db73" containerID="43415eac77c9b5d5b0544711a01a1e2413586b1e5b160927cdaf701143c0cc50" exitCode=0 Sep 30 14:48:11 crc kubenswrapper[4783]: I0930 14:48:11.797720 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xqf7d" event={"ID":"47418860-de09-49b5-8c39-13975395db73","Type":"ContainerDied","Data":"43415eac77c9b5d5b0544711a01a1e2413586b1e5b160927cdaf701143c0cc50"} Sep 30 14:48:11 crc kubenswrapper[4783]: I0930 14:48:11.797986 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xqf7d" event={"ID":"47418860-de09-49b5-8c39-13975395db73","Type":"ContainerStarted","Data":"44b3af27fd7943b5a1e96ece613b90cfd14dae20377abcbbc44fe0f785e70e91"} Sep 30 14:48:11 crc kubenswrapper[4783]: I0930 14:48:11.801990 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 14:48:13 crc kubenswrapper[4783]: I0930 14:48:13.816356 4783 generic.go:334] "Generic (PLEG): container finished" podID="47418860-de09-49b5-8c39-13975395db73" containerID="f3ef7a2170cf7ffa2369d57db22a0b69e1e33b291d8a8a394c97585c0fb365cb" exitCode=0 Sep 30 14:48:13 crc kubenswrapper[4783]: I0930 14:48:13.816417 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xqf7d" event={"ID":"47418860-de09-49b5-8c39-13975395db73","Type":"ContainerDied","Data":"f3ef7a2170cf7ffa2369d57db22a0b69e1e33b291d8a8a394c97585c0fb365cb"} Sep 30 14:48:14 crc kubenswrapper[4783]: I0930 14:48:14.828300 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xqf7d" event={"ID":"47418860-de09-49b5-8c39-13975395db73","Type":"ContainerStarted","Data":"55a329395bbad64be41d596c738917733a4dd0d8870bf89bdec541b72c44272d"} Sep 30 14:48:14 crc kubenswrapper[4783]: I0930 14:48:14.852295 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xqf7d" podStartSLOduration=2.167012121 podStartE2EDuration="4.852270855s" podCreationTimestamp="2025-09-30 14:48:10 +0000 UTC" firstStartedPulling="2025-09-30 14:48:11.801417921 +0000 UTC m=+4391.732884278" lastFinishedPulling="2025-09-30 14:48:14.486676705 +0000 UTC m=+4394.418143012" observedRunningTime="2025-09-30 14:48:14.849481226 
+0000 UTC m=+4394.780947553" watchObservedRunningTime="2025-09-30 14:48:14.852270855 +0000 UTC m=+4394.783737182" Sep 30 14:48:20 crc kubenswrapper[4783]: I0930 14:48:20.419256 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:20 crc kubenswrapper[4783]: I0930 14:48:20.419875 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:20 crc kubenswrapper[4783]: I0930 14:48:20.487286 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:20 crc kubenswrapper[4783]: I0930 14:48:20.934251 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xqf7d" Sep 30 14:48:20 crc kubenswrapper[4783]: I0930 14:48:20.987294 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xqf7d"] Sep 30 14:48:22 crc kubenswrapper[4783]: I0930 14:48:22.902109 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xqf7d" podUID="47418860-de09-49b5-8c39-13975395db73" containerName="registry-server" containerID="cri-o://55a329395bbad64be41d596c738917733a4dd0d8870bf89bdec541b72c44272d" gracePeriod=2 Sep 30 14:48:23 crc kubenswrapper[4783]: I0930 14:48:23.918556 4783 generic.go:334] "Generic (PLEG): container finished" podID="47418860-de09-49b5-8c39-13975395db73" containerID="55a329395bbad64be41d596c738917733a4dd0d8870bf89bdec541b72c44272d" exitCode=0 Sep 30 14:48:23 crc kubenswrapper[4783]: I0930 14:48:23.918648 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xqf7d" event={"ID":"47418860-de09-49b5-8c39-13975395db73","Type":"ContainerDied","Data":"55a329395bbad64be41d596c738917733a4dd0d8870bf89bdec541b72c44272d"} Sep 30 14:48:24 crc kubenswrapper[4783]: I0930 14:48:24.156592 4783 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 14:48:24 crc kubenswrapper[4783]: I0930 14:48:24.312586 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47418860-de09-49b5-8c39-13975395db73-catalog-content\") pod \"47418860-de09-49b5-8c39-13975395db73\" (UID: \"47418860-de09-49b5-8c39-13975395db73\") "
Sep 30 14:48:24 crc kubenswrapper[4783]: I0930 14:48:24.312704 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9xsk\" (UniqueName: \"kubernetes.io/projected/47418860-de09-49b5-8c39-13975395db73-kube-api-access-x9xsk\") pod \"47418860-de09-49b5-8c39-13975395db73\" (UID: \"47418860-de09-49b5-8c39-13975395db73\") "
Sep 30 14:48:24 crc kubenswrapper[4783]: I0930 14:48:24.312737 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47418860-de09-49b5-8c39-13975395db73-utilities\") pod \"47418860-de09-49b5-8c39-13975395db73\" (UID: \"47418860-de09-49b5-8c39-13975395db73\") "
Sep 30 14:48:24 crc kubenswrapper[4783]: I0930 14:48:24.313935 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47418860-de09-49b5-8c39-13975395db73-utilities" (OuterVolumeSpecName: "utilities") pod "47418860-de09-49b5-8c39-13975395db73" (UID: "47418860-de09-49b5-8c39-13975395db73"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 14:48:24 crc kubenswrapper[4783]: I0930 14:48:24.321026 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47418860-de09-49b5-8c39-13975395db73-kube-api-access-x9xsk" (OuterVolumeSpecName: "kube-api-access-x9xsk") pod "47418860-de09-49b5-8c39-13975395db73" (UID: "47418860-de09-49b5-8c39-13975395db73"). InnerVolumeSpecName "kube-api-access-x9xsk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 14:48:24 crc kubenswrapper[4783]: I0930 14:48:24.415202 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9xsk\" (UniqueName: \"kubernetes.io/projected/47418860-de09-49b5-8c39-13975395db73-kube-api-access-x9xsk\") on node \"crc\" DevicePath \"\""
Sep 30 14:48:24 crc kubenswrapper[4783]: I0930 14:48:24.415260 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47418860-de09-49b5-8c39-13975395db73-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 14:48:24 crc kubenswrapper[4783]: I0930 14:48:24.930908 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xqf7d" event={"ID":"47418860-de09-49b5-8c39-13975395db73","Type":"ContainerDied","Data":"44b3af27fd7943b5a1e96ece613b90cfd14dae20377abcbbc44fe0f785e70e91"}
Sep 30 14:48:24 crc kubenswrapper[4783]: I0930 14:48:24.930967 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xqf7d"
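
These UnmountVolume/TearDown/"Volume detached" triples come from the volume manager's reconcile pattern: diff the desired state (what running pods should have mounted) against the actual state and emit mount or unmount operations. A toy sketch of that diff, with illustrative names rather than kubelet's actual types:

    package main

    import "fmt"

    // volumeSet maps volume name -> present, standing in for kubelet's
    // desired/actual state-of-world caches (illustrative only).
    type volumeSet map[string]bool

    func reconcile(desired, actual volumeSet) {
        for v := range desired {
            if !actual[v] {
                fmt.Printf("operationExecutor.MountVolume started for volume %q\n", v)
            }
        }
        for v := range actual {
            if !desired[v] {
                fmt.Printf("operationExecutor.UnmountVolume started for volume %q\n", v)
            }
        }
    }

    func main() {
        // Pod deleted: the desired set is empty, so every mounted volume is
        // unmounted, mirroring the three entries above (order may vary).
        actual := volumeSet{"catalog-content": true, "kube-api-access-x9xsk": true, "utilities": true}
        reconcile(volumeSet{}, actual)
    }
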
Sep 30 14:48:24 crc kubenswrapper[4783]: I0930 14:48:24.931002 4783 scope.go:117] "RemoveContainer" containerID="55a329395bbad64be41d596c738917733a4dd0d8870bf89bdec541b72c44272d"
Sep 30 14:48:24 crc kubenswrapper[4783]: I0930 14:48:24.955680 4783 scope.go:117] "RemoveContainer" containerID="f3ef7a2170cf7ffa2369d57db22a0b69e1e33b291d8a8a394c97585c0fb365cb"
Sep 30 14:48:24 crc kubenswrapper[4783]: I0930 14:48:24.974110 4783 scope.go:117] "RemoveContainer" containerID="43415eac77c9b5d5b0544711a01a1e2413586b1e5b160927cdaf701143c0cc50"
Sep 30 14:48:25 crc kubenswrapper[4783]: I0930 14:48:25.459284 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47418860-de09-49b5-8c39-13975395db73-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47418860-de09-49b5-8c39-13975395db73" (UID: "47418860-de09-49b5-8c39-13975395db73"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 14:48:25 crc kubenswrapper[4783]: I0930 14:48:25.531672 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47418860-de09-49b5-8c39-13975395db73-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 14:48:25 crc kubenswrapper[4783]: I0930 14:48:25.576528 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xqf7d"]
Sep 30 14:48:25 crc kubenswrapper[4783]: I0930 14:48:25.587027 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xqf7d"]
Sep 30 14:48:26 crc kubenswrapper[4783]: I0930 14:48:26.860001 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47418860-de09-49b5-8c39-13975395db73" path="/var/lib/kubelet/pods/47418860-de09-49b5-8c39-13975395db73/volumes"
Sep 30 14:48:37 crc kubenswrapper[4783]: I0930 14:48:37.673716 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 14:48:37 crc kubenswrapper[4783]: I0930 14:48:37.674401 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 14:48:37 crc kubenswrapper[4783]: I0930 14:48:37.674479 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf"
Sep 30 14:48:37 crc kubenswrapper[4783]: I0930 14:48:37.676070 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 14:48:37 crc kubenswrapper[4783]: I0930 14:48:37.676173 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" gracePeriod=600
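
"Killing container with a grace period" is the usual TERM-then-KILL sequence: signal the process, wait up to gracePeriod seconds (600 here, 2 for the registry-server earlier), then force-kill. A minimal process-level sketch of that policy, assuming a local child process as a stand-in for the real CRI runtime call:

    package main

    import (
        "fmt"
        "os/exec"
        "syscall"
        "time"
    )

    // killWithGrace sends SIGTERM, waits up to grace, then SIGKILLs,
    // mirroring the gracePeriod semantics in the entries above.
    func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
        _ = cmd.Process.Signal(syscall.SIGTERM)
        done := make(chan error, 1)
        go func() { done <- cmd.Wait() }()
        select {
        case <-done:
            fmt.Println("exited within the grace period")
        case <-time.After(grace):
            _ = cmd.Process.Kill()
            <-done
            fmt.Println("force-killed after the grace period")
        }
    }

    func main() {
        cmd := exec.Command("sleep", "30")
        if err := cmd.Start(); err != nil {
            panic(err)
        }
        killWithGrace(cmd, 2*time.Second) // gracePeriod=2, as for registry-server
    }
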
podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" gracePeriod=600 Sep 30 14:48:37 crc kubenswrapper[4783]: E0930 14:48:37.816937 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:48:38 crc kubenswrapper[4783]: I0930 14:48:38.041381 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" exitCode=0 Sep 30 14:48:38 crc kubenswrapper[4783]: I0930 14:48:38.041677 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587"} Sep 30 14:48:38 crc kubenswrapper[4783]: I0930 14:48:38.041820 4783 scope.go:117] "RemoveContainer" containerID="e247d70f2ad435805d4e436da73c2d3075b430b00f2273ddfdf8ad883f33a1da" Sep 30 14:48:38 crc kubenswrapper[4783]: I0930 14:48:38.042434 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:48:38 crc kubenswrapper[4783]: E0930 14:48:38.042681 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:48:49 crc kubenswrapper[4783]: I0930 14:48:49.843035 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:48:49 crc kubenswrapper[4783]: E0930 14:48:49.843683 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:49:03 crc kubenswrapper[4783]: I0930 14:49:03.842700 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:49:03 crc kubenswrapper[4783]: E0930 14:49:03.843523 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:49:16 crc kubenswrapper[4783]: I0930 14:49:16.843692 4783 scope.go:117] 
"RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:49:16 crc kubenswrapper[4783]: E0930 14:49:16.845207 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:49:28 crc kubenswrapper[4783]: I0930 14:49:28.843147 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:49:28 crc kubenswrapper[4783]: E0930 14:49:28.844110 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:49:42 crc kubenswrapper[4783]: I0930 14:49:42.842628 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:49:42 crc kubenswrapper[4783]: E0930 14:49:42.843368 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:49:57 crc kubenswrapper[4783]: I0930 14:49:57.843961 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:49:57 crc kubenswrapper[4783]: E0930 14:49:57.844815 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:50:12 crc kubenswrapper[4783]: I0930 14:50:12.843923 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:50:12 crc kubenswrapper[4783]: E0930 14:50:12.844977 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:50:24 crc kubenswrapper[4783]: I0930 14:50:24.843472 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:50:24 crc kubenswrapper[4783]: E0930 14:50:24.844402 4783 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:50:38 crc kubenswrapper[4783]: I0930 14:50:38.843485 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:50:38 crc kubenswrapper[4783]: E0930 14:50:38.844183 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:50:52 crc kubenswrapper[4783]: I0930 14:50:52.843988 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:50:52 crc kubenswrapper[4783]: E0930 14:50:52.844927 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.015927 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hkpck"] Sep 30 14:51:06 crc kubenswrapper[4783]: E0930 14:51:06.016755 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47418860-de09-49b5-8c39-13975395db73" containerName="extract-content" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.016770 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="47418860-de09-49b5-8c39-13975395db73" containerName="extract-content" Sep 30 14:51:06 crc kubenswrapper[4783]: E0930 14:51:06.016789 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47418860-de09-49b5-8c39-13975395db73" containerName="registry-server" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.016797 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="47418860-de09-49b5-8c39-13975395db73" containerName="registry-server" Sep 30 14:51:06 crc kubenswrapper[4783]: E0930 14:51:06.016819 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47418860-de09-49b5-8c39-13975395db73" containerName="extract-utilities" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.016828 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="47418860-de09-49b5-8c39-13975395db73" containerName="extract-utilities" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.016954 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="47418860-de09-49b5-8c39-13975395db73" containerName="registry-server" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.018080 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.031113 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hkpck"] Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.084012 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5d2w4\" (UniqueName: \"kubernetes.io/projected/ec41413c-d5ce-41b7-91f4-52ce0982905d-kube-api-access-5d2w4\") pod \"certified-operators-hkpck\" (UID: \"ec41413c-d5ce-41b7-91f4-52ce0982905d\") " pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.084058 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec41413c-d5ce-41b7-91f4-52ce0982905d-utilities\") pod \"certified-operators-hkpck\" (UID: \"ec41413c-d5ce-41b7-91f4-52ce0982905d\") " pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.084114 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec41413c-d5ce-41b7-91f4-52ce0982905d-catalog-content\") pod \"certified-operators-hkpck\" (UID: \"ec41413c-d5ce-41b7-91f4-52ce0982905d\") " pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.185171 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5d2w4\" (UniqueName: \"kubernetes.io/projected/ec41413c-d5ce-41b7-91f4-52ce0982905d-kube-api-access-5d2w4\") pod \"certified-operators-hkpck\" (UID: \"ec41413c-d5ce-41b7-91f4-52ce0982905d\") " pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.185374 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec41413c-d5ce-41b7-91f4-52ce0982905d-utilities\") pod \"certified-operators-hkpck\" (UID: \"ec41413c-d5ce-41b7-91f4-52ce0982905d\") " pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.185429 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec41413c-d5ce-41b7-91f4-52ce0982905d-catalog-content\") pod \"certified-operators-hkpck\" (UID: \"ec41413c-d5ce-41b7-91f4-52ce0982905d\") " pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.185935 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec41413c-d5ce-41b7-91f4-52ce0982905d-utilities\") pod \"certified-operators-hkpck\" (UID: \"ec41413c-d5ce-41b7-91f4-52ce0982905d\") " pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.186565 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec41413c-d5ce-41b7-91f4-52ce0982905d-catalog-content\") pod \"certified-operators-hkpck\" (UID: \"ec41413c-d5ce-41b7-91f4-52ce0982905d\") " pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.203357 4783 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5d2w4\" (UniqueName: \"kubernetes.io/projected/ec41413c-d5ce-41b7-91f4-52ce0982905d-kube-api-access-5d2w4\") pod \"certified-operators-hkpck\" (UID: \"ec41413c-d5ce-41b7-91f4-52ce0982905d\") " pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.342972 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:06 crc kubenswrapper[4783]: I0930 14:51:06.798137 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hkpck"] Sep 30 14:51:07 crc kubenswrapper[4783]: I0930 14:51:07.319337 4783 generic.go:334] "Generic (PLEG): container finished" podID="ec41413c-d5ce-41b7-91f4-52ce0982905d" containerID="e372fe2e4c2422e15cc9023b787bff42e580c7dd9f658810d832ad8bfb523099" exitCode=0 Sep 30 14:51:07 crc kubenswrapper[4783]: I0930 14:51:07.319436 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hkpck" event={"ID":"ec41413c-d5ce-41b7-91f4-52ce0982905d","Type":"ContainerDied","Data":"e372fe2e4c2422e15cc9023b787bff42e580c7dd9f658810d832ad8bfb523099"} Sep 30 14:51:07 crc kubenswrapper[4783]: I0930 14:51:07.319602 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hkpck" event={"ID":"ec41413c-d5ce-41b7-91f4-52ce0982905d","Type":"ContainerStarted","Data":"184594858bc93b59225476f04ea8cba850f2a0b8bbf4499ed186627f3cbeabd1"} Sep 30 14:51:07 crc kubenswrapper[4783]: I0930 14:51:07.843159 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:51:07 crc kubenswrapper[4783]: E0930 14:51:07.843405 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:51:08 crc kubenswrapper[4783]: I0930 14:51:08.330115 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hkpck" event={"ID":"ec41413c-d5ce-41b7-91f4-52ce0982905d","Type":"ContainerStarted","Data":"4851ce7b90e5f307231b591dd9072c9748177662e720d0e5ae7eb6b1a9bb1185"} Sep 30 14:51:09 crc kubenswrapper[4783]: I0930 14:51:09.338657 4783 generic.go:334] "Generic (PLEG): container finished" podID="ec41413c-d5ce-41b7-91f4-52ce0982905d" containerID="4851ce7b90e5f307231b591dd9072c9748177662e720d0e5ae7eb6b1a9bb1185" exitCode=0 Sep 30 14:51:09 crc kubenswrapper[4783]: I0930 14:51:09.338700 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hkpck" event={"ID":"ec41413c-d5ce-41b7-91f4-52ce0982905d","Type":"ContainerDied","Data":"4851ce7b90e5f307231b591dd9072c9748177662e720d0e5ae7eb6b1a9bb1185"} Sep 30 14:51:11 crc kubenswrapper[4783]: I0930 14:51:11.354590 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hkpck" event={"ID":"ec41413c-d5ce-41b7-91f4-52ce0982905d","Type":"ContainerStarted","Data":"93e06ff94fa04811745d25b05fac109ddcd44276bfa3e0b17ab3249805d7a8ec"} Sep 30 14:51:11 crc kubenswrapper[4783]: I0930 14:51:11.374040 4783 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hkpck" podStartSLOduration=3.49207447 podStartE2EDuration="6.374021798s" podCreationTimestamp="2025-09-30 14:51:05 +0000 UTC" firstStartedPulling="2025-09-30 14:51:07.321321379 +0000 UTC m=+4567.252787676" lastFinishedPulling="2025-09-30 14:51:10.203268697 +0000 UTC m=+4570.134735004" observedRunningTime="2025-09-30 14:51:11.371705434 +0000 UTC m=+4571.303171741" watchObservedRunningTime="2025-09-30 14:51:11.374021798 +0000 UTC m=+4571.305488105" Sep 30 14:51:16 crc kubenswrapper[4783]: I0930 14:51:16.343864 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:16 crc kubenswrapper[4783]: I0930 14:51:16.345029 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:16 crc kubenswrapper[4783]: I0930 14:51:16.407508 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:16 crc kubenswrapper[4783]: I0930 14:51:16.482779 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:16 crc kubenswrapper[4783]: I0930 14:51:16.657654 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hkpck"] Sep 30 14:51:18 crc kubenswrapper[4783]: I0930 14:51:18.433536 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hkpck" podUID="ec41413c-d5ce-41b7-91f4-52ce0982905d" containerName="registry-server" containerID="cri-o://93e06ff94fa04811745d25b05fac109ddcd44276bfa3e0b17ab3249805d7a8ec" gracePeriod=2 Sep 30 14:51:18 crc kubenswrapper[4783]: I0930 14:51:18.844883 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:51:18 crc kubenswrapper[4783]: E0930 14:51:18.845831 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:51:18 crc kubenswrapper[4783]: I0930 14:51:18.869647 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:18 crc kubenswrapper[4783]: I0930 14:51:18.883638 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec41413c-d5ce-41b7-91f4-52ce0982905d-utilities\") pod \"ec41413c-d5ce-41b7-91f4-52ce0982905d\" (UID: \"ec41413c-d5ce-41b7-91f4-52ce0982905d\") " Sep 30 14:51:18 crc kubenswrapper[4783]: I0930 14:51:18.883776 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec41413c-d5ce-41b7-91f4-52ce0982905d-catalog-content\") pod \"ec41413c-d5ce-41b7-91f4-52ce0982905d\" (UID: \"ec41413c-d5ce-41b7-91f4-52ce0982905d\") " Sep 30 14:51:18 crc kubenswrapper[4783]: I0930 14:51:18.883982 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5d2w4\" (UniqueName: \"kubernetes.io/projected/ec41413c-d5ce-41b7-91f4-52ce0982905d-kube-api-access-5d2w4\") pod \"ec41413c-d5ce-41b7-91f4-52ce0982905d\" (UID: \"ec41413c-d5ce-41b7-91f4-52ce0982905d\") " Sep 30 14:51:18 crc kubenswrapper[4783]: I0930 14:51:18.888048 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec41413c-d5ce-41b7-91f4-52ce0982905d-utilities" (OuterVolumeSpecName: "utilities") pod "ec41413c-d5ce-41b7-91f4-52ce0982905d" (UID: "ec41413c-d5ce-41b7-91f4-52ce0982905d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:51:18 crc kubenswrapper[4783]: I0930 14:51:18.905922 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec41413c-d5ce-41b7-91f4-52ce0982905d-kube-api-access-5d2w4" (OuterVolumeSpecName: "kube-api-access-5d2w4") pod "ec41413c-d5ce-41b7-91f4-52ce0982905d" (UID: "ec41413c-d5ce-41b7-91f4-52ce0982905d"). InnerVolumeSpecName "kube-api-access-5d2w4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:51:18 crc kubenswrapper[4783]: I0930 14:51:18.965176 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec41413c-d5ce-41b7-91f4-52ce0982905d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ec41413c-d5ce-41b7-91f4-52ce0982905d" (UID: "ec41413c-d5ce-41b7-91f4-52ce0982905d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:51:18 crc kubenswrapper[4783]: I0930 14:51:18.985514 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec41413c-d5ce-41b7-91f4-52ce0982905d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:51:18 crc kubenswrapper[4783]: I0930 14:51:18.985553 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec41413c-d5ce-41b7-91f4-52ce0982905d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:51:18 crc kubenswrapper[4783]: I0930 14:51:18.985565 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5d2w4\" (UniqueName: \"kubernetes.io/projected/ec41413c-d5ce-41b7-91f4-52ce0982905d-kube-api-access-5d2w4\") on node \"crc\" DevicePath \"\"" Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.446522 4783 generic.go:334] "Generic (PLEG): container finished" podID="ec41413c-d5ce-41b7-91f4-52ce0982905d" containerID="93e06ff94fa04811745d25b05fac109ddcd44276bfa3e0b17ab3249805d7a8ec" exitCode=0 Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.446601 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hkpck" event={"ID":"ec41413c-d5ce-41b7-91f4-52ce0982905d","Type":"ContainerDied","Data":"93e06ff94fa04811745d25b05fac109ddcd44276bfa3e0b17ab3249805d7a8ec"} Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.446634 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hkpck" Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.446652 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hkpck" event={"ID":"ec41413c-d5ce-41b7-91f4-52ce0982905d","Type":"ContainerDied","Data":"184594858bc93b59225476f04ea8cba850f2a0b8bbf4499ed186627f3cbeabd1"} Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.446690 4783 scope.go:117] "RemoveContainer" containerID="93e06ff94fa04811745d25b05fac109ddcd44276bfa3e0b17ab3249805d7a8ec" Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.487316 4783 scope.go:117] "RemoveContainer" containerID="4851ce7b90e5f307231b591dd9072c9748177662e720d0e5ae7eb6b1a9bb1185" Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.496880 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hkpck"] Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.502689 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hkpck"] Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.507091 4783 scope.go:117] "RemoveContainer" containerID="e372fe2e4c2422e15cc9023b787bff42e580c7dd9f658810d832ad8bfb523099" Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.550692 4783 scope.go:117] "RemoveContainer" containerID="93e06ff94fa04811745d25b05fac109ddcd44276bfa3e0b17ab3249805d7a8ec" Sep 30 14:51:19 crc kubenswrapper[4783]: E0930 14:51:19.551614 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93e06ff94fa04811745d25b05fac109ddcd44276bfa3e0b17ab3249805d7a8ec\": container with ID starting with 93e06ff94fa04811745d25b05fac109ddcd44276bfa3e0b17ab3249805d7a8ec not found: ID does not exist" containerID="93e06ff94fa04811745d25b05fac109ddcd44276bfa3e0b17ab3249805d7a8ec" Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.551695 
4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93e06ff94fa04811745d25b05fac109ddcd44276bfa3e0b17ab3249805d7a8ec"} err="failed to get container status \"93e06ff94fa04811745d25b05fac109ddcd44276bfa3e0b17ab3249805d7a8ec\": rpc error: code = NotFound desc = could not find container \"93e06ff94fa04811745d25b05fac109ddcd44276bfa3e0b17ab3249805d7a8ec\": container with ID starting with 93e06ff94fa04811745d25b05fac109ddcd44276bfa3e0b17ab3249805d7a8ec not found: ID does not exist" Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.551749 4783 scope.go:117] "RemoveContainer" containerID="4851ce7b90e5f307231b591dd9072c9748177662e720d0e5ae7eb6b1a9bb1185" Sep 30 14:51:19 crc kubenswrapper[4783]: E0930 14:51:19.552559 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4851ce7b90e5f307231b591dd9072c9748177662e720d0e5ae7eb6b1a9bb1185\": container with ID starting with 4851ce7b90e5f307231b591dd9072c9748177662e720d0e5ae7eb6b1a9bb1185 not found: ID does not exist" containerID="4851ce7b90e5f307231b591dd9072c9748177662e720d0e5ae7eb6b1a9bb1185" Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.552611 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4851ce7b90e5f307231b591dd9072c9748177662e720d0e5ae7eb6b1a9bb1185"} err="failed to get container status \"4851ce7b90e5f307231b591dd9072c9748177662e720d0e5ae7eb6b1a9bb1185\": rpc error: code = NotFound desc = could not find container \"4851ce7b90e5f307231b591dd9072c9748177662e720d0e5ae7eb6b1a9bb1185\": container with ID starting with 4851ce7b90e5f307231b591dd9072c9748177662e720d0e5ae7eb6b1a9bb1185 not found: ID does not exist" Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.552708 4783 scope.go:117] "RemoveContainer" containerID="e372fe2e4c2422e15cc9023b787bff42e580c7dd9f658810d832ad8bfb523099" Sep 30 14:51:19 crc kubenswrapper[4783]: E0930 14:51:19.553424 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e372fe2e4c2422e15cc9023b787bff42e580c7dd9f658810d832ad8bfb523099\": container with ID starting with e372fe2e4c2422e15cc9023b787bff42e580c7dd9f658810d832ad8bfb523099 not found: ID does not exist" containerID="e372fe2e4c2422e15cc9023b787bff42e580c7dd9f658810d832ad8bfb523099" Sep 30 14:51:19 crc kubenswrapper[4783]: I0930 14:51:19.553469 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e372fe2e4c2422e15cc9023b787bff42e580c7dd9f658810d832ad8bfb523099"} err="failed to get container status \"e372fe2e4c2422e15cc9023b787bff42e580c7dd9f658810d832ad8bfb523099\": rpc error: code = NotFound desc = could not find container \"e372fe2e4c2422e15cc9023b787bff42e580c7dd9f658810d832ad8bfb523099\": container with ID starting with e372fe2e4c2422e15cc9023b787bff42e580c7dd9f658810d832ad8bfb523099 not found: ID does not exist" Sep 30 14:51:20 crc kubenswrapper[4783]: I0930 14:51:20.858964 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec41413c-d5ce-41b7-91f4-52ce0982905d" path="/var/lib/kubelet/pods/ec41413c-d5ce-41b7-91f4-52ce0982905d/volumes" Sep 30 14:51:29 crc kubenswrapper[4783]: I0930 14:51:29.843546 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:51:29 crc kubenswrapper[4783]: E0930 14:51:29.844495 4783 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:51:44 crc kubenswrapper[4783]: I0930 14:51:44.843446 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:51:44 crc kubenswrapper[4783]: E0930 14:51:44.844167 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:51:55 crc kubenswrapper[4783]: I0930 14:51:55.843660 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:51:55 crc kubenswrapper[4783]: E0930 14:51:55.844657 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:52:10 crc kubenswrapper[4783]: I0930 14:52:10.847503 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:52:10 crc kubenswrapper[4783]: E0930 14:52:10.848277 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:52:21 crc kubenswrapper[4783]: I0930 14:52:21.842969 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:52:21 crc kubenswrapper[4783]: E0930 14:52:21.843721 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:52:32 crc kubenswrapper[4783]: I0930 14:52:32.843061 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:52:32 crc kubenswrapper[4783]: E0930 14:52:32.844323 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:52:47 crc kubenswrapper[4783]: I0930 14:52:47.843717 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:52:47 crc kubenswrapper[4783]: E0930 14:52:47.844715 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:52:59 crc kubenswrapper[4783]: I0930 14:52:59.843905 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:52:59 crc kubenswrapper[4783]: E0930 14:52:59.845187 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:53:10 crc kubenswrapper[4783]: I0930 14:53:10.852092 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:53:10 crc kubenswrapper[4783]: E0930 14:53:10.853328 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:53:23 crc kubenswrapper[4783]: I0930 14:53:23.907549 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fdcqw"] Sep 30 14:53:23 crc kubenswrapper[4783]: E0930 14:53:23.908743 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec41413c-d5ce-41b7-91f4-52ce0982905d" containerName="extract-content" Sep 30 14:53:23 crc kubenswrapper[4783]: I0930 14:53:23.908758 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec41413c-d5ce-41b7-91f4-52ce0982905d" containerName="extract-content" Sep 30 14:53:23 crc kubenswrapper[4783]: E0930 14:53:23.908784 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec41413c-d5ce-41b7-91f4-52ce0982905d" containerName="registry-server" Sep 30 14:53:23 crc kubenswrapper[4783]: I0930 14:53:23.908792 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec41413c-d5ce-41b7-91f4-52ce0982905d" containerName="registry-server" Sep 30 14:53:23 crc kubenswrapper[4783]: E0930 14:53:23.908808 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec41413c-d5ce-41b7-91f4-52ce0982905d" containerName="extract-utilities" Sep 30 14:53:23 crc kubenswrapper[4783]: I0930 14:53:23.908815 4783 state_mem.go:107] "Deleted CPUSet assignment" 
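The identical "Error syncing pod" entries above are the kubelet refusing to restart machine-config-daemon while its CrashLoopBackOff window is open; the sync loop re-checks every 10-15s, but the container itself is only eligible to restart once per backoff interval. A minimal sketch of the delay schedule, assuming the upstream kubelet defaults of a 10s initial backoff doubling up to the 5m cap that the log reports (illustrative only, not kubelet source):

package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		base    = 10 * time.Second // assumed initial container restart backoff
		maxWait = 5 * time.Minute  // matches the "back-off 5m0s" cap in the log
	)
	d := base
	for i := 1; i <= 7; i++ {
		fmt.Printf("restart attempt %d: wait %v\n", i, d) // 10s, 20s, 40s, 1m20s, 2m40s, 5m, 5m, ...
		if d *= 2; d > maxWait {
			d = maxWait // once capped, every further restart waits the full 5m
		}
	}
}

That cap is why the same message repeats unchanged for several minutes before the successful ContainerStarted at 14:53:51 below.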
podUID="ec41413c-d5ce-41b7-91f4-52ce0982905d" containerName="extract-utilities" Sep 30 14:53:23 crc kubenswrapper[4783]: I0930 14:53:23.908952 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec41413c-d5ce-41b7-91f4-52ce0982905d" containerName="registry-server" Sep 30 14:53:23 crc kubenswrapper[4783]: I0930 14:53:23.910006 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:23 crc kubenswrapper[4783]: I0930 14:53:23.924360 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fdcqw"] Sep 30 14:53:24 crc kubenswrapper[4783]: I0930 14:53:24.010179 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-utilities\") pod \"redhat-operators-fdcqw\" (UID: \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\") " pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:24 crc kubenswrapper[4783]: I0930 14:53:24.010255 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-catalog-content\") pod \"redhat-operators-fdcqw\" (UID: \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\") " pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:24 crc kubenswrapper[4783]: I0930 14:53:24.010273 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2t7v6\" (UniqueName: \"kubernetes.io/projected/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-kube-api-access-2t7v6\") pod \"redhat-operators-fdcqw\" (UID: \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\") " pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:24 crc kubenswrapper[4783]: I0930 14:53:24.112345 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-utilities\") pod \"redhat-operators-fdcqw\" (UID: \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\") " pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:24 crc kubenswrapper[4783]: I0930 14:53:24.112419 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-catalog-content\") pod \"redhat-operators-fdcqw\" (UID: \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\") " pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:24 crc kubenswrapper[4783]: I0930 14:53:24.112446 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2t7v6\" (UniqueName: \"kubernetes.io/projected/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-kube-api-access-2t7v6\") pod \"redhat-operators-fdcqw\" (UID: \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\") " pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:24 crc kubenswrapper[4783]: I0930 14:53:24.113093 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-utilities\") pod \"redhat-operators-fdcqw\" (UID: \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\") " pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:24 crc kubenswrapper[4783]: I0930 14:53:24.113170 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-catalog-content\") pod \"redhat-operators-fdcqw\" (UID: \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\") " pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:24 crc kubenswrapper[4783]: I0930 14:53:24.131007 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2t7v6\" (UniqueName: \"kubernetes.io/projected/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-kube-api-access-2t7v6\") pod \"redhat-operators-fdcqw\" (UID: \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\") " pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:24 crc kubenswrapper[4783]: I0930 14:53:24.247013 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:24 crc kubenswrapper[4783]: I0930 14:53:24.745663 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fdcqw"] Sep 30 14:53:24 crc kubenswrapper[4783]: I0930 14:53:24.843026 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:53:24 crc kubenswrapper[4783]: E0930 14:53:24.843234 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:53:25 crc kubenswrapper[4783]: I0930 14:53:25.659922 4783 generic.go:334] "Generic (PLEG): container finished" podID="6c59f0cc-0844-4155-a2bc-5e7792c8c25b" containerID="f07b158a60eaf30f8f5cf134524fa07a27a60e69aeb1d808b37b53ee3a639774" exitCode=0 Sep 30 14:53:25 crc kubenswrapper[4783]: I0930 14:53:25.659979 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdcqw" event={"ID":"6c59f0cc-0844-4155-a2bc-5e7792c8c25b","Type":"ContainerDied","Data":"f07b158a60eaf30f8f5cf134524fa07a27a60e69aeb1d808b37b53ee3a639774"} Sep 30 14:53:25 crc kubenswrapper[4783]: I0930 14:53:25.660241 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdcqw" event={"ID":"6c59f0cc-0844-4155-a2bc-5e7792c8c25b","Type":"ContainerStarted","Data":"8c48692d3c41ae05b144fc91920b8c6bf8a80392ba3326622a36e58e12deb2b6"} Sep 30 14:53:25 crc kubenswrapper[4783]: I0930 14:53:25.661575 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 14:53:27 crc kubenswrapper[4783]: E0930 14:53:27.420711 4783 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c59f0cc_0844_4155_a2bc_5e7792c8c25b.slice/crio-83c98e83c474f8962ed1fa2fef287d9ff23daf37ff82d48d49b696709a46a3f8.scope\": RecentStats: unable to find data in memory cache]" Sep 30 14:53:27 crc kubenswrapper[4783]: I0930 14:53:27.679820 4783 generic.go:334] "Generic (PLEG): container finished" podID="6c59f0cc-0844-4155-a2bc-5e7792c8c25b" containerID="83c98e83c474f8962ed1fa2fef287d9ff23daf37ff82d48d49b696709a46a3f8" exitCode=0 Sep 30 14:53:27 crc kubenswrapper[4783]: I0930 14:53:27.679872 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-fdcqw" event={"ID":"6c59f0cc-0844-4155-a2bc-5e7792c8c25b","Type":"ContainerDied","Data":"83c98e83c474f8962ed1fa2fef287d9ff23daf37ff82d48d49b696709a46a3f8"} Sep 30 14:53:28 crc kubenswrapper[4783]: I0930 14:53:28.687867 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdcqw" event={"ID":"6c59f0cc-0844-4155-a2bc-5e7792c8c25b","Type":"ContainerStarted","Data":"298172a06fa8f258bd91b39718ebda129e35e280f55d8872c4cf613a0dc590db"} Sep 30 14:53:28 crc kubenswrapper[4783]: I0930 14:53:28.711613 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fdcqw" podStartSLOduration=3.286021351 podStartE2EDuration="5.71159778s" podCreationTimestamp="2025-09-30 14:53:23 +0000 UTC" firstStartedPulling="2025-09-30 14:53:25.661258749 +0000 UTC m=+4705.592725056" lastFinishedPulling="2025-09-30 14:53:28.086835178 +0000 UTC m=+4708.018301485" observedRunningTime="2025-09-30 14:53:28.70534753 +0000 UTC m=+4708.636813837" watchObservedRunningTime="2025-09-30 14:53:28.71159778 +0000 UTC m=+4708.643064077" Sep 30 14:53:34 crc kubenswrapper[4783]: I0930 14:53:34.248438 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:34 crc kubenswrapper[4783]: I0930 14:53:34.249019 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:34 crc kubenswrapper[4783]: I0930 14:53:34.288804 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:35 crc kubenswrapper[4783]: I0930 14:53:35.120389 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:35 crc kubenswrapper[4783]: I0930 14:53:35.843110 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:53:35 crc kubenswrapper[4783]: E0930 14:53:35.844297 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 14:53:36 crc kubenswrapper[4783]: I0930 14:53:36.091984 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fdcqw"] Sep 30 14:53:36 crc kubenswrapper[4783]: I0930 14:53:36.744937 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fdcqw" podUID="6c59f0cc-0844-4155-a2bc-5e7792c8c25b" containerName="registry-server" containerID="cri-o://298172a06fa8f258bd91b39718ebda129e35e280f55d8872c4cf613a0dc590db" gracePeriod=2 Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.153896 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.203606 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-catalog-content\") pod \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\" (UID: \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\") " Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.203723 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-utilities\") pod \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\" (UID: \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\") " Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.203884 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2t7v6\" (UniqueName: \"kubernetes.io/projected/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-kube-api-access-2t7v6\") pod \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\" (UID: \"6c59f0cc-0844-4155-a2bc-5e7792c8c25b\") " Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.206288 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-utilities" (OuterVolumeSpecName: "utilities") pod "6c59f0cc-0844-4155-a2bc-5e7792c8c25b" (UID: "6c59f0cc-0844-4155-a2bc-5e7792c8c25b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.209315 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-kube-api-access-2t7v6" (OuterVolumeSpecName: "kube-api-access-2t7v6") pod "6c59f0cc-0844-4155-a2bc-5e7792c8c25b" (UID: "6c59f0cc-0844-4155-a2bc-5e7792c8c25b"). InnerVolumeSpecName "kube-api-access-2t7v6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.292874 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c59f0cc-0844-4155-a2bc-5e7792c8c25b" (UID: "6c59f0cc-0844-4155-a2bc-5e7792c8c25b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.306005 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.306043 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2t7v6\" (UniqueName: \"kubernetes.io/projected/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-kube-api-access-2t7v6\") on node \"crc\" DevicePath \"\"" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.306058 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c59f0cc-0844-4155-a2bc-5e7792c8c25b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.753702 4783 generic.go:334] "Generic (PLEG): container finished" podID="6c59f0cc-0844-4155-a2bc-5e7792c8c25b" containerID="298172a06fa8f258bd91b39718ebda129e35e280f55d8872c4cf613a0dc590db" exitCode=0 Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.753745 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdcqw" event={"ID":"6c59f0cc-0844-4155-a2bc-5e7792c8c25b","Type":"ContainerDied","Data":"298172a06fa8f258bd91b39718ebda129e35e280f55d8872c4cf613a0dc590db"} Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.753779 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fdcqw" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.753800 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdcqw" event={"ID":"6c59f0cc-0844-4155-a2bc-5e7792c8c25b","Type":"ContainerDied","Data":"8c48692d3c41ae05b144fc91920b8c6bf8a80392ba3326622a36e58e12deb2b6"} Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.753826 4783 scope.go:117] "RemoveContainer" containerID="298172a06fa8f258bd91b39718ebda129e35e280f55d8872c4cf613a0dc590db" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.768938 4783 scope.go:117] "RemoveContainer" containerID="83c98e83c474f8962ed1fa2fef287d9ff23daf37ff82d48d49b696709a46a3f8" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.783320 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fdcqw"] Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.786808 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fdcqw"] Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.810410 4783 scope.go:117] "RemoveContainer" containerID="f07b158a60eaf30f8f5cf134524fa07a27a60e69aeb1d808b37b53ee3a639774" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.825670 4783 scope.go:117] "RemoveContainer" containerID="298172a06fa8f258bd91b39718ebda129e35e280f55d8872c4cf613a0dc590db" Sep 30 14:53:37 crc kubenswrapper[4783]: E0930 14:53:37.826084 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"298172a06fa8f258bd91b39718ebda129e35e280f55d8872c4cf613a0dc590db\": container with ID starting with 298172a06fa8f258bd91b39718ebda129e35e280f55d8872c4cf613a0dc590db not found: ID does not exist" containerID="298172a06fa8f258bd91b39718ebda129e35e280f55d8872c4cf613a0dc590db" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.826129 4783 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"298172a06fa8f258bd91b39718ebda129e35e280f55d8872c4cf613a0dc590db"} err="failed to get container status \"298172a06fa8f258bd91b39718ebda129e35e280f55d8872c4cf613a0dc590db\": rpc error: code = NotFound desc = could not find container \"298172a06fa8f258bd91b39718ebda129e35e280f55d8872c4cf613a0dc590db\": container with ID starting with 298172a06fa8f258bd91b39718ebda129e35e280f55d8872c4cf613a0dc590db not found: ID does not exist" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.826157 4783 scope.go:117] "RemoveContainer" containerID="83c98e83c474f8962ed1fa2fef287d9ff23daf37ff82d48d49b696709a46a3f8" Sep 30 14:53:37 crc kubenswrapper[4783]: E0930 14:53:37.826535 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83c98e83c474f8962ed1fa2fef287d9ff23daf37ff82d48d49b696709a46a3f8\": container with ID starting with 83c98e83c474f8962ed1fa2fef287d9ff23daf37ff82d48d49b696709a46a3f8 not found: ID does not exist" containerID="83c98e83c474f8962ed1fa2fef287d9ff23daf37ff82d48d49b696709a46a3f8" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.826569 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83c98e83c474f8962ed1fa2fef287d9ff23daf37ff82d48d49b696709a46a3f8"} err="failed to get container status \"83c98e83c474f8962ed1fa2fef287d9ff23daf37ff82d48d49b696709a46a3f8\": rpc error: code = NotFound desc = could not find container \"83c98e83c474f8962ed1fa2fef287d9ff23daf37ff82d48d49b696709a46a3f8\": container with ID starting with 83c98e83c474f8962ed1fa2fef287d9ff23daf37ff82d48d49b696709a46a3f8 not found: ID does not exist" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.826589 4783 scope.go:117] "RemoveContainer" containerID="f07b158a60eaf30f8f5cf134524fa07a27a60e69aeb1d808b37b53ee3a639774" Sep 30 14:53:37 crc kubenswrapper[4783]: E0930 14:53:37.826904 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f07b158a60eaf30f8f5cf134524fa07a27a60e69aeb1d808b37b53ee3a639774\": container with ID starting with f07b158a60eaf30f8f5cf134524fa07a27a60e69aeb1d808b37b53ee3a639774 not found: ID does not exist" containerID="f07b158a60eaf30f8f5cf134524fa07a27a60e69aeb1d808b37b53ee3a639774" Sep 30 14:53:37 crc kubenswrapper[4783]: I0930 14:53:37.826925 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f07b158a60eaf30f8f5cf134524fa07a27a60e69aeb1d808b37b53ee3a639774"} err="failed to get container status \"f07b158a60eaf30f8f5cf134524fa07a27a60e69aeb1d808b37b53ee3a639774\": rpc error: code = NotFound desc = could not find container \"f07b158a60eaf30f8f5cf134524fa07a27a60e69aeb1d808b37b53ee3a639774\": container with ID starting with f07b158a60eaf30f8f5cf134524fa07a27a60e69aeb1d808b37b53ee3a639774 not found: ID does not exist" Sep 30 14:53:38 crc kubenswrapper[4783]: I0930 14:53:38.854477 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c59f0cc-0844-4155-a2bc-5e7792c8c25b" path="/var/lib/kubelet/pods/6c59f0cc-0844-4155-a2bc-5e7792c8c25b/volumes" Sep 30 14:53:50 crc kubenswrapper[4783]: I0930 14:53:50.847352 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:53:51 crc kubenswrapper[4783]: I0930 14:53:51.892905 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
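The three NotFound failures above are a benign race, not data loss: the "RemoveContainer" scope entries fire after CRI-O has already deleted the containers, so the follow-up status lookup can only return NotFound, which the kubelet logs and then ignores. A minimal sketch of the idempotent-delete pattern at work (hypothetical names, a stdlib sentinel error standing in for the gRPC NotFound status; not kubelet's actual API):

package main

import (
	"errors"
	"fmt"
)

// errNotFound stands in for the runtime's NotFound status code.
var errNotFound = errors.New("not found")

// removeContainer deletes a container but treats "already gone" as success,
// mirroring why the NotFound errors above are harmless.
func removeContainer(id string, remove func(string) error) error {
	if err := remove(id); err != nil {
		if errors.Is(err, errNotFound) {
			return nil // already removed by a concurrent path
		}
		return fmt.Errorf("remove %s: %w", id, err)
	}
	return nil
}

func main() {
	gone := func(string) error { return errNotFound }
	fmt.Println(removeContainer("93e06ff9", gone)) // <nil>: NotFound counts as done
}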
pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"e4bf56c617529c86546c173be89686625c4a0389c7bae70cacf09b2d80df50d4"} Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.285927 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xh6pd"] Sep 30 14:54:32 crc kubenswrapper[4783]: E0930 14:54:32.286780 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c59f0cc-0844-4155-a2bc-5e7792c8c25b" containerName="registry-server" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.286793 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c59f0cc-0844-4155-a2bc-5e7792c8c25b" containerName="registry-server" Sep 30 14:54:32 crc kubenswrapper[4783]: E0930 14:54:32.286815 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c59f0cc-0844-4155-a2bc-5e7792c8c25b" containerName="extract-utilities" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.286822 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c59f0cc-0844-4155-a2bc-5e7792c8c25b" containerName="extract-utilities" Sep 30 14:54:32 crc kubenswrapper[4783]: E0930 14:54:32.286835 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c59f0cc-0844-4155-a2bc-5e7792c8c25b" containerName="extract-content" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.286842 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c59f0cc-0844-4155-a2bc-5e7792c8c25b" containerName="extract-content" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.287021 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c59f0cc-0844-4155-a2bc-5e7792c8c25b" containerName="registry-server" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.288026 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.294719 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xh6pd"] Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.332073 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/112be4e0-0a95-4bb4-9b91-48f687c91117-utilities\") pod \"community-operators-xh6pd\" (UID: \"112be4e0-0a95-4bb4-9b91-48f687c91117\") " pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.332498 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxs9s\" (UniqueName: \"kubernetes.io/projected/112be4e0-0a95-4bb4-9b91-48f687c91117-kube-api-access-zxs9s\") pod \"community-operators-xh6pd\" (UID: \"112be4e0-0a95-4bb4-9b91-48f687c91117\") " pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.332539 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/112be4e0-0a95-4bb4-9b91-48f687c91117-catalog-content\") pod \"community-operators-xh6pd\" (UID: \"112be4e0-0a95-4bb4-9b91-48f687c91117\") " pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.433362 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/112be4e0-0a95-4bb4-9b91-48f687c91117-utilities\") pod \"community-operators-xh6pd\" (UID: \"112be4e0-0a95-4bb4-9b91-48f687c91117\") " pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.433533 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxs9s\" (UniqueName: \"kubernetes.io/projected/112be4e0-0a95-4bb4-9b91-48f687c91117-kube-api-access-zxs9s\") pod \"community-operators-xh6pd\" (UID: \"112be4e0-0a95-4bb4-9b91-48f687c91117\") " pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.433593 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/112be4e0-0a95-4bb4-9b91-48f687c91117-catalog-content\") pod \"community-operators-xh6pd\" (UID: \"112be4e0-0a95-4bb4-9b91-48f687c91117\") " pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.434045 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/112be4e0-0a95-4bb4-9b91-48f687c91117-catalog-content\") pod \"community-operators-xh6pd\" (UID: \"112be4e0-0a95-4bb4-9b91-48f687c91117\") " pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.434557 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/112be4e0-0a95-4bb4-9b91-48f687c91117-utilities\") pod \"community-operators-xh6pd\" (UID: \"112be4e0-0a95-4bb4-9b91-48f687c91117\") " pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.452810 4783 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zxs9s\" (UniqueName: \"kubernetes.io/projected/112be4e0-0a95-4bb4-9b91-48f687c91117-kube-api-access-zxs9s\") pod \"community-operators-xh6pd\" (UID: \"112be4e0-0a95-4bb4-9b91-48f687c91117\") " pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:32 crc kubenswrapper[4783]: I0930 14:54:32.645894 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:33 crc kubenswrapper[4783]: I0930 14:54:33.162804 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xh6pd"] Sep 30 14:54:33 crc kubenswrapper[4783]: I0930 14:54:33.242570 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xh6pd" event={"ID":"112be4e0-0a95-4bb4-9b91-48f687c91117","Type":"ContainerStarted","Data":"f0cf6ffeecb4b950ad6202e8a0dc85581bad97487a52510c7761e3adb16a9bb7"} Sep 30 14:54:34 crc kubenswrapper[4783]: I0930 14:54:34.252759 4783 generic.go:334] "Generic (PLEG): container finished" podID="112be4e0-0a95-4bb4-9b91-48f687c91117" containerID="6c14eaffe0a8dc7222ee55c7f81dc11689fcb0c182659df1d682a6e024221e25" exitCode=0 Sep 30 14:54:34 crc kubenswrapper[4783]: I0930 14:54:34.252865 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xh6pd" event={"ID":"112be4e0-0a95-4bb4-9b91-48f687c91117","Type":"ContainerDied","Data":"6c14eaffe0a8dc7222ee55c7f81dc11689fcb0c182659df1d682a6e024221e25"} Sep 30 14:54:35 crc kubenswrapper[4783]: I0930 14:54:35.263588 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xh6pd" event={"ID":"112be4e0-0a95-4bb4-9b91-48f687c91117","Type":"ContainerStarted","Data":"1b322671b7f7991488d0701963b669b0171b01265c5158c42a7c46f7952f1774"} Sep 30 14:54:36 crc kubenswrapper[4783]: I0930 14:54:36.275464 4783 generic.go:334] "Generic (PLEG): container finished" podID="112be4e0-0a95-4bb4-9b91-48f687c91117" containerID="1b322671b7f7991488d0701963b669b0171b01265c5158c42a7c46f7952f1774" exitCode=0 Sep 30 14:54:36 crc kubenswrapper[4783]: I0930 14:54:36.275585 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xh6pd" event={"ID":"112be4e0-0a95-4bb4-9b91-48f687c91117","Type":"ContainerDied","Data":"1b322671b7f7991488d0701963b669b0171b01265c5158c42a7c46f7952f1774"} Sep 30 14:54:37 crc kubenswrapper[4783]: I0930 14:54:37.284240 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xh6pd" event={"ID":"112be4e0-0a95-4bb4-9b91-48f687c91117","Type":"ContainerStarted","Data":"75225b60636c2eb190d7d378cdf839243608f565ba85230b393d4a965f7a98fd"} Sep 30 14:54:37 crc kubenswrapper[4783]: I0930 14:54:37.305549 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xh6pd" podStartSLOduration=2.838078304 podStartE2EDuration="5.305526942s" podCreationTimestamp="2025-09-30 14:54:32 +0000 UTC" firstStartedPulling="2025-09-30 14:54:34.254542911 +0000 UTC m=+4774.186009218" lastFinishedPulling="2025-09-30 14:54:36.721991529 +0000 UTC m=+4776.653457856" observedRunningTime="2025-09-30 14:54:37.297799704 +0000 UTC m=+4777.229266011" watchObservedRunningTime="2025-09-30 14:54:37.305526942 +0000 UTC m=+4777.236993249" Sep 30 14:54:42 crc kubenswrapper[4783]: I0930 14:54:42.646401 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:42 crc kubenswrapper[4783]: I0930 14:54:42.646953 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:42 crc kubenswrapper[4783]: I0930 14:54:42.704544 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:43 crc kubenswrapper[4783]: I0930 14:54:43.416392 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:43 crc kubenswrapper[4783]: I0930 14:54:43.483364 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xh6pd"] Sep 30 14:54:45 crc kubenswrapper[4783]: I0930 14:54:45.349642 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xh6pd" podUID="112be4e0-0a95-4bb4-9b91-48f687c91117" containerName="registry-server" containerID="cri-o://75225b60636c2eb190d7d378cdf839243608f565ba85230b393d4a965f7a98fd" gracePeriod=2 Sep 30 14:54:45 crc kubenswrapper[4783]: I0930 14:54:45.773589 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:45 crc kubenswrapper[4783]: I0930 14:54:45.956125 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/112be4e0-0a95-4bb4-9b91-48f687c91117-catalog-content\") pod \"112be4e0-0a95-4bb4-9b91-48f687c91117\" (UID: \"112be4e0-0a95-4bb4-9b91-48f687c91117\") " Sep 30 14:54:45 crc kubenswrapper[4783]: I0930 14:54:45.956177 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/112be4e0-0a95-4bb4-9b91-48f687c91117-utilities\") pod \"112be4e0-0a95-4bb4-9b91-48f687c91117\" (UID: \"112be4e0-0a95-4bb4-9b91-48f687c91117\") " Sep 30 14:54:45 crc kubenswrapper[4783]: I0930 14:54:45.956200 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxs9s\" (UniqueName: \"kubernetes.io/projected/112be4e0-0a95-4bb4-9b91-48f687c91117-kube-api-access-zxs9s\") pod \"112be4e0-0a95-4bb4-9b91-48f687c91117\" (UID: \"112be4e0-0a95-4bb4-9b91-48f687c91117\") " Sep 30 14:54:45 crc kubenswrapper[4783]: I0930 14:54:45.957991 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/112be4e0-0a95-4bb4-9b91-48f687c91117-utilities" (OuterVolumeSpecName: "utilities") pod "112be4e0-0a95-4bb4-9b91-48f687c91117" (UID: "112be4e0-0a95-4bb4-9b91-48f687c91117"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:54:45 crc kubenswrapper[4783]: I0930 14:54:45.969644 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/112be4e0-0a95-4bb4-9b91-48f687c91117-kube-api-access-zxs9s" (OuterVolumeSpecName: "kube-api-access-zxs9s") pod "112be4e0-0a95-4bb4-9b91-48f687c91117" (UID: "112be4e0-0a95-4bb4-9b91-48f687c91117"). InnerVolumeSpecName "kube-api-access-zxs9s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.058503 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/112be4e0-0a95-4bb4-9b91-48f687c91117-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.058576 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxs9s\" (UniqueName: \"kubernetes.io/projected/112be4e0-0a95-4bb4-9b91-48f687c91117-kube-api-access-zxs9s\") on node \"crc\" DevicePath \"\"" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.195647 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/112be4e0-0a95-4bb4-9b91-48f687c91117-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "112be4e0-0a95-4bb4-9b91-48f687c91117" (UID: "112be4e0-0a95-4bb4-9b91-48f687c91117"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.261810 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/112be4e0-0a95-4bb4-9b91-48f687c91117-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.363507 4783 generic.go:334] "Generic (PLEG): container finished" podID="112be4e0-0a95-4bb4-9b91-48f687c91117" containerID="75225b60636c2eb190d7d378cdf839243608f565ba85230b393d4a965f7a98fd" exitCode=0 Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.363580 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xh6pd" event={"ID":"112be4e0-0a95-4bb4-9b91-48f687c91117","Type":"ContainerDied","Data":"75225b60636c2eb190d7d378cdf839243608f565ba85230b393d4a965f7a98fd"} Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.363600 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xh6pd" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.363634 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xh6pd" event={"ID":"112be4e0-0a95-4bb4-9b91-48f687c91117","Type":"ContainerDied","Data":"f0cf6ffeecb4b950ad6202e8a0dc85581bad97487a52510c7761e3adb16a9bb7"} Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.363672 4783 scope.go:117] "RemoveContainer" containerID="75225b60636c2eb190d7d378cdf839243608f565ba85230b393d4a965f7a98fd" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.396460 4783 scope.go:117] "RemoveContainer" containerID="1b322671b7f7991488d0701963b669b0171b01265c5158c42a7c46f7952f1774" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.413497 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xh6pd"] Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.419769 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xh6pd"] Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.442745 4783 scope.go:117] "RemoveContainer" containerID="6c14eaffe0a8dc7222ee55c7f81dc11689fcb0c182659df1d682a6e024221e25" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.475435 4783 scope.go:117] "RemoveContainer" containerID="75225b60636c2eb190d7d378cdf839243608f565ba85230b393d4a965f7a98fd" Sep 30 14:54:46 crc kubenswrapper[4783]: E0930 14:54:46.476185 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75225b60636c2eb190d7d378cdf839243608f565ba85230b393d4a965f7a98fd\": container with ID starting with 75225b60636c2eb190d7d378cdf839243608f565ba85230b393d4a965f7a98fd not found: ID does not exist" containerID="75225b60636c2eb190d7d378cdf839243608f565ba85230b393d4a965f7a98fd" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.476254 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75225b60636c2eb190d7d378cdf839243608f565ba85230b393d4a965f7a98fd"} err="failed to get container status \"75225b60636c2eb190d7d378cdf839243608f565ba85230b393d4a965f7a98fd\": rpc error: code = NotFound desc = could not find container \"75225b60636c2eb190d7d378cdf839243608f565ba85230b393d4a965f7a98fd\": container with ID starting with 75225b60636c2eb190d7d378cdf839243608f565ba85230b393d4a965f7a98fd not found: ID does not exist" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.476296 4783 scope.go:117] "RemoveContainer" containerID="1b322671b7f7991488d0701963b669b0171b01265c5158c42a7c46f7952f1774" Sep 30 14:54:46 crc kubenswrapper[4783]: E0930 14:54:46.476698 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b322671b7f7991488d0701963b669b0171b01265c5158c42a7c46f7952f1774\": container with ID starting with 1b322671b7f7991488d0701963b669b0171b01265c5158c42a7c46f7952f1774 not found: ID does not exist" containerID="1b322671b7f7991488d0701963b669b0171b01265c5158c42a7c46f7952f1774" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.476875 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b322671b7f7991488d0701963b669b0171b01265c5158c42a7c46f7952f1774"} err="failed to get container status \"1b322671b7f7991488d0701963b669b0171b01265c5158c42a7c46f7952f1774\": rpc error: code = NotFound desc = could not find 
container \"1b322671b7f7991488d0701963b669b0171b01265c5158c42a7c46f7952f1774\": container with ID starting with 1b322671b7f7991488d0701963b669b0171b01265c5158c42a7c46f7952f1774 not found: ID does not exist" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.477026 4783 scope.go:117] "RemoveContainer" containerID="6c14eaffe0a8dc7222ee55c7f81dc11689fcb0c182659df1d682a6e024221e25" Sep 30 14:54:46 crc kubenswrapper[4783]: E0930 14:54:46.477602 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c14eaffe0a8dc7222ee55c7f81dc11689fcb0c182659df1d682a6e024221e25\": container with ID starting with 6c14eaffe0a8dc7222ee55c7f81dc11689fcb0c182659df1d682a6e024221e25 not found: ID does not exist" containerID="6c14eaffe0a8dc7222ee55c7f81dc11689fcb0c182659df1d682a6e024221e25" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.477837 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c14eaffe0a8dc7222ee55c7f81dc11689fcb0c182659df1d682a6e024221e25"} err="failed to get container status \"6c14eaffe0a8dc7222ee55c7f81dc11689fcb0c182659df1d682a6e024221e25\": rpc error: code = NotFound desc = could not find container \"6c14eaffe0a8dc7222ee55c7f81dc11689fcb0c182659df1d682a6e024221e25\": container with ID starting with 6c14eaffe0a8dc7222ee55c7f81dc11689fcb0c182659df1d682a6e024221e25 not found: ID does not exist" Sep 30 14:54:46 crc kubenswrapper[4783]: I0930 14:54:46.863190 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="112be4e0-0a95-4bb4-9b91-48f687c91117" path="/var/lib/kubelet/pods/112be4e0-0a95-4bb4-9b91-48f687c91117/volumes" Sep 30 14:56:07 crc kubenswrapper[4783]: I0930 14:56:07.673705 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:56:07 crc kubenswrapper[4783]: I0930 14:56:07.674358 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.262931 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-tbdk6"] Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.267547 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-tbdk6"] Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.415515 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-lktx4"] Sep 30 14:56:13 crc kubenswrapper[4783]: E0930 14:56:13.415940 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="112be4e0-0a95-4bb4-9b91-48f687c91117" containerName="extract-utilities" Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.415971 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="112be4e0-0a95-4bb4-9b91-48f687c91117" containerName="extract-utilities" Sep 30 14:56:13 crc kubenswrapper[4783]: E0930 14:56:13.415996 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="112be4e0-0a95-4bb4-9b91-48f687c91117" containerName="registry-server" Sep 30 14:56:13 crc 
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.262931 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-tbdk6"]
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.267547 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-tbdk6"]
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.415515 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-lktx4"]
Sep 30 14:56:13 crc kubenswrapper[4783]: E0930 14:56:13.415940 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="112be4e0-0a95-4bb4-9b91-48f687c91117" containerName="extract-utilities"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.415971 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="112be4e0-0a95-4bb4-9b91-48f687c91117" containerName="extract-utilities"
Sep 30 14:56:13 crc kubenswrapper[4783]: E0930 14:56:13.415996 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="112be4e0-0a95-4bb4-9b91-48f687c91117" containerName="registry-server"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.416009 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="112be4e0-0a95-4bb4-9b91-48f687c91117" containerName="registry-server"
Sep 30 14:56:13 crc kubenswrapper[4783]: E0930 14:56:13.416034 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="112be4e0-0a95-4bb4-9b91-48f687c91117" containerName="extract-content"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.416047 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="112be4e0-0a95-4bb4-9b91-48f687c91117" containerName="extract-content"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.416351 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="112be4e0-0a95-4bb4-9b91-48f687c91117" containerName="registry-server"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.417070 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-lktx4"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.419973 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.420629 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.422093 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.422569 4783 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k2c62"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.423566 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-lktx4"]
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.614972 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d59baeb0-565b-4feb-8689-8fc91e7e5176-crc-storage\") pod \"crc-storage-crc-lktx4\" (UID: \"d59baeb0-565b-4feb-8689-8fc91e7e5176\") " pod="crc-storage/crc-storage-crc-lktx4"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.615107 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7tzw\" (UniqueName: \"kubernetes.io/projected/d59baeb0-565b-4feb-8689-8fc91e7e5176-kube-api-access-b7tzw\") pod \"crc-storage-crc-lktx4\" (UID: \"d59baeb0-565b-4feb-8689-8fc91e7e5176\") " pod="crc-storage/crc-storage-crc-lktx4"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.615161 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d59baeb0-565b-4feb-8689-8fc91e7e5176-node-mnt\") pod \"crc-storage-crc-lktx4\" (UID: \"d59baeb0-565b-4feb-8689-8fc91e7e5176\") " pod="crc-storage/crc-storage-crc-lktx4"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.716820 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d59baeb0-565b-4feb-8689-8fc91e7e5176-crc-storage\") pod \"crc-storage-crc-lktx4\" (UID: \"d59baeb0-565b-4feb-8689-8fc91e7e5176\") " pod="crc-storage/crc-storage-crc-lktx4"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.717007 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7tzw\" (UniqueName: \"kubernetes.io/projected/d59baeb0-565b-4feb-8689-8fc91e7e5176-kube-api-access-b7tzw\") pod \"crc-storage-crc-lktx4\" (UID: \"d59baeb0-565b-4feb-8689-8fc91e7e5176\") " pod="crc-storage/crc-storage-crc-lktx4"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.717096 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d59baeb0-565b-4feb-8689-8fc91e7e5176-node-mnt\") pod \"crc-storage-crc-lktx4\" (UID: \"d59baeb0-565b-4feb-8689-8fc91e7e5176\") " pod="crc-storage/crc-storage-crc-lktx4"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.717649 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d59baeb0-565b-4feb-8689-8fc91e7e5176-node-mnt\") pod \"crc-storage-crc-lktx4\" (UID: \"d59baeb0-565b-4feb-8689-8fc91e7e5176\") " pod="crc-storage/crc-storage-crc-lktx4"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.719232 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d59baeb0-565b-4feb-8689-8fc91e7e5176-crc-storage\") pod \"crc-storage-crc-lktx4\" (UID: \"d59baeb0-565b-4feb-8689-8fc91e7e5176\") " pod="crc-storage/crc-storage-crc-lktx4"
Sep 30 14:56:13 crc kubenswrapper[4783]: I0930 14:56:13.751690 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7tzw\" (UniqueName: \"kubernetes.io/projected/d59baeb0-565b-4feb-8689-8fc91e7e5176-kube-api-access-b7tzw\") pod \"crc-storage-crc-lktx4\" (UID: \"d59baeb0-565b-4feb-8689-8fc91e7e5176\") " pod="crc-storage/crc-storage-crc-lktx4"
Sep 30 14:56:14 crc kubenswrapper[4783]: I0930 14:56:14.037327 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-lktx4"
Sep 30 14:56:14 crc kubenswrapper[4783]: I0930 14:56:14.489568 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-lktx4"]
Sep 30 14:56:14 crc kubenswrapper[4783]: I0930 14:56:14.860111 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54c00828-b66d-446c-8c0d-c43dc287fae9" path="/var/lib/kubelet/pods/54c00828-b66d-446c-8c0d-c43dc287fae9/volumes"
Sep 30 14:56:15 crc kubenswrapper[4783]: I0930 14:56:15.120395 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-lktx4" event={"ID":"d59baeb0-565b-4feb-8689-8fc91e7e5176","Type":"ContainerStarted","Data":"c2cb8b55185ce081dd8ef19ae64b27cf5e83fe379ae6fd6687fb8d80bf6403c4"}
Sep 30 14:56:16 crc kubenswrapper[4783]: I0930 14:56:16.137265 4783 generic.go:334] "Generic (PLEG): container finished" podID="d59baeb0-565b-4feb-8689-8fc91e7e5176" containerID="0a26d4d43a48344ce605a8b6a0205ac4f4e3076546e09740b36f8eb6cb764505" exitCode=0
Sep 30 14:56:16 crc kubenswrapper[4783]: I0930 14:56:16.137803 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-lktx4" event={"ID":"d59baeb0-565b-4feb-8689-8fc91e7e5176","Type":"ContainerDied","Data":"0a26d4d43a48344ce605a8b6a0205ac4f4e3076546e09740b36f8eb6cb764505"}
Sep 30 14:56:17 crc kubenswrapper[4783]: I0930 14:56:17.422396 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-lktx4"
Sep 30 14:56:17 crc kubenswrapper[4783]: I0930 14:56:17.584085 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d59baeb0-565b-4feb-8689-8fc91e7e5176-crc-storage\") pod \"d59baeb0-565b-4feb-8689-8fc91e7e5176\" (UID: \"d59baeb0-565b-4feb-8689-8fc91e7e5176\") "
Sep 30 14:56:17 crc kubenswrapper[4783]: I0930 14:56:17.584163 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7tzw\" (UniqueName: \"kubernetes.io/projected/d59baeb0-565b-4feb-8689-8fc91e7e5176-kube-api-access-b7tzw\") pod \"d59baeb0-565b-4feb-8689-8fc91e7e5176\" (UID: \"d59baeb0-565b-4feb-8689-8fc91e7e5176\") "
Sep 30 14:56:17 crc kubenswrapper[4783]: I0930 14:56:17.584223 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d59baeb0-565b-4feb-8689-8fc91e7e5176-node-mnt\") pod \"d59baeb0-565b-4feb-8689-8fc91e7e5176\" (UID: \"d59baeb0-565b-4feb-8689-8fc91e7e5176\") "
Sep 30 14:56:17 crc kubenswrapper[4783]: I0930 14:56:17.584530 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d59baeb0-565b-4feb-8689-8fc91e7e5176-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "d59baeb0-565b-4feb-8689-8fc91e7e5176" (UID: "d59baeb0-565b-4feb-8689-8fc91e7e5176"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 14:56:17 crc kubenswrapper[4783]: I0930 14:56:17.584985 4783 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d59baeb0-565b-4feb-8689-8fc91e7e5176-node-mnt\") on node \"crc\" DevicePath \"\""
Sep 30 14:56:17 crc kubenswrapper[4783]: I0930 14:56:17.589404 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d59baeb0-565b-4feb-8689-8fc91e7e5176-kube-api-access-b7tzw" (OuterVolumeSpecName: "kube-api-access-b7tzw") pod "d59baeb0-565b-4feb-8689-8fc91e7e5176" (UID: "d59baeb0-565b-4feb-8689-8fc91e7e5176"). InnerVolumeSpecName "kube-api-access-b7tzw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 14:56:17 crc kubenswrapper[4783]: I0930 14:56:17.616647 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d59baeb0-565b-4feb-8689-8fc91e7e5176-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "d59baeb0-565b-4feb-8689-8fc91e7e5176" (UID: "d59baeb0-565b-4feb-8689-8fc91e7e5176"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 14:56:17 crc kubenswrapper[4783]: I0930 14:56:17.686989 4783 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d59baeb0-565b-4feb-8689-8fc91e7e5176-crc-storage\") on node \"crc\" DevicePath \"\""
Sep 30 14:56:17 crc kubenswrapper[4783]: I0930 14:56:17.687052 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7tzw\" (UniqueName: \"kubernetes.io/projected/d59baeb0-565b-4feb-8689-8fc91e7e5176-kube-api-access-b7tzw\") on node \"crc\" DevicePath \"\""
Sep 30 14:56:18 crc kubenswrapper[4783]: I0930 14:56:18.156051 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-lktx4" event={"ID":"d59baeb0-565b-4feb-8689-8fc91e7e5176","Type":"ContainerDied","Data":"c2cb8b55185ce081dd8ef19ae64b27cf5e83fe379ae6fd6687fb8d80bf6403c4"}
Sep 30 14:56:18 crc kubenswrapper[4783]: I0930 14:56:18.156095 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2cb8b55185ce081dd8ef19ae64b27cf5e83fe379ae6fd6687fb8d80bf6403c4"
Sep 30 14:56:18 crc kubenswrapper[4783]: I0930 14:56:18.156179 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-lktx4"
Sep 30 14:56:18 crc kubenswrapper[4783]: I0930 14:56:18.678795 4783 scope.go:117] "RemoveContainer" containerID="d10c78d037207c4478abdea8a14b2538bb54b5e8cc77391b9b9b895af7100db0"
Sep 30 14:56:19 crc kubenswrapper[4783]: I0930 14:56:19.723489 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-lktx4"]
Sep 30 14:56:19 crc kubenswrapper[4783]: I0930 14:56:19.731481 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-lktx4"]
Sep 30 14:56:19 crc kubenswrapper[4783]: I0930 14:56:19.869904 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-lbsnq"]
Sep 30 14:56:19 crc kubenswrapper[4783]: E0930 14:56:19.870406 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d59baeb0-565b-4feb-8689-8fc91e7e5176" containerName="storage"
Sep 30 14:56:19 crc kubenswrapper[4783]: I0930 14:56:19.870437 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d59baeb0-565b-4feb-8689-8fc91e7e5176" containerName="storage"
Sep 30 14:56:19 crc kubenswrapper[4783]: I0930 14:56:19.870721 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d59baeb0-565b-4feb-8689-8fc91e7e5176" containerName="storage"
Sep 30 14:56:19 crc kubenswrapper[4783]: I0930 14:56:19.871518 4783 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="crc-storage/crc-storage-crc-lbsnq" Sep 30 14:56:19 crc kubenswrapper[4783]: I0930 14:56:19.874240 4783 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k2c62" Sep 30 14:56:19 crc kubenswrapper[4783]: I0930 14:56:19.874995 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Sep 30 14:56:19 crc kubenswrapper[4783]: I0930 14:56:19.877886 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Sep 30 14:56:19 crc kubenswrapper[4783]: I0930 14:56:19.878014 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Sep 30 14:56:19 crc kubenswrapper[4783]: I0930 14:56:19.891269 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-lbsnq"] Sep 30 14:56:19 crc kubenswrapper[4783]: I0930 14:56:19.981460 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrd2c\" (UniqueName: \"kubernetes.io/projected/58c26074-f6cf-4cbc-8819-3a1edf292243-kube-api-access-mrd2c\") pod \"crc-storage-crc-lbsnq\" (UID: \"58c26074-f6cf-4cbc-8819-3a1edf292243\") " pod="crc-storage/crc-storage-crc-lbsnq" Sep 30 14:56:19 crc kubenswrapper[4783]: I0930 14:56:19.981503 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/58c26074-f6cf-4cbc-8819-3a1edf292243-crc-storage\") pod \"crc-storage-crc-lbsnq\" (UID: \"58c26074-f6cf-4cbc-8819-3a1edf292243\") " pod="crc-storage/crc-storage-crc-lbsnq" Sep 30 14:56:19 crc kubenswrapper[4783]: I0930 14:56:19.981961 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/58c26074-f6cf-4cbc-8819-3a1edf292243-node-mnt\") pod \"crc-storage-crc-lbsnq\" (UID: \"58c26074-f6cf-4cbc-8819-3a1edf292243\") " pod="crc-storage/crc-storage-crc-lbsnq" Sep 30 14:56:20 crc kubenswrapper[4783]: I0930 14:56:20.083881 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/58c26074-f6cf-4cbc-8819-3a1edf292243-node-mnt\") pod \"crc-storage-crc-lbsnq\" (UID: \"58c26074-f6cf-4cbc-8819-3a1edf292243\") " pod="crc-storage/crc-storage-crc-lbsnq" Sep 30 14:56:20 crc kubenswrapper[4783]: I0930 14:56:20.083963 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrd2c\" (UniqueName: \"kubernetes.io/projected/58c26074-f6cf-4cbc-8819-3a1edf292243-kube-api-access-mrd2c\") pod \"crc-storage-crc-lbsnq\" (UID: \"58c26074-f6cf-4cbc-8819-3a1edf292243\") " pod="crc-storage/crc-storage-crc-lbsnq" Sep 30 14:56:20 crc kubenswrapper[4783]: I0930 14:56:20.083983 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/58c26074-f6cf-4cbc-8819-3a1edf292243-crc-storage\") pod \"crc-storage-crc-lbsnq\" (UID: \"58c26074-f6cf-4cbc-8819-3a1edf292243\") " pod="crc-storage/crc-storage-crc-lbsnq" Sep 30 14:56:20 crc kubenswrapper[4783]: I0930 14:56:20.084203 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/58c26074-f6cf-4cbc-8819-3a1edf292243-node-mnt\") pod \"crc-storage-crc-lbsnq\" (UID: \"58c26074-f6cf-4cbc-8819-3a1edf292243\") " 
pod="crc-storage/crc-storage-crc-lbsnq" Sep 30 14:56:20 crc kubenswrapper[4783]: I0930 14:56:20.084973 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/58c26074-f6cf-4cbc-8819-3a1edf292243-crc-storage\") pod \"crc-storage-crc-lbsnq\" (UID: \"58c26074-f6cf-4cbc-8819-3a1edf292243\") " pod="crc-storage/crc-storage-crc-lbsnq" Sep 30 14:56:20 crc kubenswrapper[4783]: I0930 14:56:20.103607 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrd2c\" (UniqueName: \"kubernetes.io/projected/58c26074-f6cf-4cbc-8819-3a1edf292243-kube-api-access-mrd2c\") pod \"crc-storage-crc-lbsnq\" (UID: \"58c26074-f6cf-4cbc-8819-3a1edf292243\") " pod="crc-storage/crc-storage-crc-lbsnq" Sep 30 14:56:20 crc kubenswrapper[4783]: I0930 14:56:20.191872 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-lbsnq" Sep 30 14:56:20 crc kubenswrapper[4783]: I0930 14:56:20.610104 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-lbsnq"] Sep 30 14:56:20 crc kubenswrapper[4783]: I0930 14:56:20.856850 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d59baeb0-565b-4feb-8689-8fc91e7e5176" path="/var/lib/kubelet/pods/d59baeb0-565b-4feb-8689-8fc91e7e5176/volumes" Sep 30 14:56:21 crc kubenswrapper[4783]: I0930 14:56:21.176543 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-lbsnq" event={"ID":"58c26074-f6cf-4cbc-8819-3a1edf292243","Type":"ContainerStarted","Data":"a75b327e9fee069f4475558e67f2526fd301d5635b9b529c05df7c50f80627eb"} Sep 30 14:56:22 crc kubenswrapper[4783]: I0930 14:56:22.188168 4783 generic.go:334] "Generic (PLEG): container finished" podID="58c26074-f6cf-4cbc-8819-3a1edf292243" containerID="b938a0d9ba3f53b85733f385863828cd1ef0e016f26da6cb21c01d8c62579a61" exitCode=0 Sep 30 14:56:22 crc kubenswrapper[4783]: I0930 14:56:22.188246 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-lbsnq" event={"ID":"58c26074-f6cf-4cbc-8819-3a1edf292243","Type":"ContainerDied","Data":"b938a0d9ba3f53b85733f385863828cd1ef0e016f26da6cb21c01d8c62579a61"} Sep 30 14:56:23 crc kubenswrapper[4783]: I0930 14:56:23.440690 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-lbsnq" Sep 30 14:56:23 crc kubenswrapper[4783]: I0930 14:56:23.540983 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/58c26074-f6cf-4cbc-8819-3a1edf292243-crc-storage\") pod \"58c26074-f6cf-4cbc-8819-3a1edf292243\" (UID: \"58c26074-f6cf-4cbc-8819-3a1edf292243\") " Sep 30 14:56:23 crc kubenswrapper[4783]: I0930 14:56:23.541105 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/58c26074-f6cf-4cbc-8819-3a1edf292243-node-mnt\") pod \"58c26074-f6cf-4cbc-8819-3a1edf292243\" (UID: \"58c26074-f6cf-4cbc-8819-3a1edf292243\") " Sep 30 14:56:23 crc kubenswrapper[4783]: I0930 14:56:23.541134 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrd2c\" (UniqueName: \"kubernetes.io/projected/58c26074-f6cf-4cbc-8819-3a1edf292243-kube-api-access-mrd2c\") pod \"58c26074-f6cf-4cbc-8819-3a1edf292243\" (UID: \"58c26074-f6cf-4cbc-8819-3a1edf292243\") " Sep 30 14:56:23 crc kubenswrapper[4783]: I0930 14:56:23.541296 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/58c26074-f6cf-4cbc-8819-3a1edf292243-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "58c26074-f6cf-4cbc-8819-3a1edf292243" (UID: "58c26074-f6cf-4cbc-8819-3a1edf292243"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 14:56:23 crc kubenswrapper[4783]: I0930 14:56:23.542242 4783 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/58c26074-f6cf-4cbc-8819-3a1edf292243-node-mnt\") on node \"crc\" DevicePath \"\"" Sep 30 14:56:23 crc kubenswrapper[4783]: I0930 14:56:23.682257 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58c26074-f6cf-4cbc-8819-3a1edf292243-kube-api-access-mrd2c" (OuterVolumeSpecName: "kube-api-access-mrd2c") pod "58c26074-f6cf-4cbc-8819-3a1edf292243" (UID: "58c26074-f6cf-4cbc-8819-3a1edf292243"). InnerVolumeSpecName "kube-api-access-mrd2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:56:23 crc kubenswrapper[4783]: I0930 14:56:23.707700 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58c26074-f6cf-4cbc-8819-3a1edf292243-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "58c26074-f6cf-4cbc-8819-3a1edf292243" (UID: "58c26074-f6cf-4cbc-8819-3a1edf292243"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:56:23 crc kubenswrapper[4783]: I0930 14:56:23.745696 4783 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/58c26074-f6cf-4cbc-8819-3a1edf292243-crc-storage\") on node \"crc\" DevicePath \"\"" Sep 30 14:56:23 crc kubenswrapper[4783]: I0930 14:56:23.745745 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrd2c\" (UniqueName: \"kubernetes.io/projected/58c26074-f6cf-4cbc-8819-3a1edf292243-kube-api-access-mrd2c\") on node \"crc\" DevicePath \"\"" Sep 30 14:56:24 crc kubenswrapper[4783]: I0930 14:56:24.203804 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-lbsnq" event={"ID":"58c26074-f6cf-4cbc-8819-3a1edf292243","Type":"ContainerDied","Data":"a75b327e9fee069f4475558e67f2526fd301d5635b9b529c05df7c50f80627eb"} Sep 30 14:56:24 crc kubenswrapper[4783]: I0930 14:56:24.203844 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-lbsnq" Sep 30 14:56:24 crc kubenswrapper[4783]: I0930 14:56:24.204211 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a75b327e9fee069f4475558e67f2526fd301d5635b9b529c05df7c50f80627eb" Sep 30 14:56:37 crc kubenswrapper[4783]: I0930 14:56:37.674151 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:56:37 crc kubenswrapper[4783]: I0930 14:56:37.675200 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:57:07 crc kubenswrapper[4783]: I0930 14:57:07.674257 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 14:57:07 crc kubenswrapper[4783]: I0930 14:57:07.674776 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 14:57:07 crc kubenswrapper[4783]: I0930 14:57:07.674830 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 14:57:07 crc kubenswrapper[4783]: I0930 14:57:07.675396 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e4bf56c617529c86546c173be89686625c4a0389c7bae70cacf09b2d80df50d4"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 14:57:07 crc kubenswrapper[4783]: I0930 14:57:07.675446 4783 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://e4bf56c617529c86546c173be89686625c4a0389c7bae70cacf09b2d80df50d4" gracePeriod=600 Sep 30 14:57:08 crc kubenswrapper[4783]: I0930 14:57:08.607247 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="e4bf56c617529c86546c173be89686625c4a0389c7bae70cacf09b2d80df50d4" exitCode=0 Sep 30 14:57:08 crc kubenswrapper[4783]: I0930 14:57:08.607318 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"e4bf56c617529c86546c173be89686625c4a0389c7bae70cacf09b2d80df50d4"} Sep 30 14:57:08 crc kubenswrapper[4783]: I0930 14:57:08.607662 4783 scope.go:117] "RemoveContainer" containerID="3208da15e229b9d38a4bb81cd565c8af5fcd6edd200e98edb9222088a291e587" Sep 30 14:57:09 crc kubenswrapper[4783]: I0930 14:57:09.619895 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06"} Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.135057 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-87cd8867c-rcq8q"] Sep 30 14:58:20 crc kubenswrapper[4783]: E0930 14:58:20.136123 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58c26074-f6cf-4cbc-8819-3a1edf292243" containerName="storage" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.136138 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="58c26074-f6cf-4cbc-8819-3a1edf292243" containerName="storage" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.136337 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="58c26074-f6cf-4cbc-8819-3a1edf292243" containerName="storage" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.137285 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-87cd8867c-rcq8q" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.139119 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.139765 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-lvlsm" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.140783 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.141016 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.150532 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-87cd8867c-rcq8q"] Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.178268 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-775cb64d69-k4mx5"] Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.179568 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-775cb64d69-k4mx5" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.183210 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.193566 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-775cb64d69-k4mx5"] Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.281056 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-config\") pod \"dnsmasq-dns-775cb64d69-k4mx5\" (UID: \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\") " pod="openstack/dnsmasq-dns-775cb64d69-k4mx5" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.281115 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c57cacbb-bcb7-4282-a70c-b93a581cef16-config\") pod \"dnsmasq-dns-87cd8867c-rcq8q\" (UID: \"c57cacbb-bcb7-4282-a70c-b93a581cef16\") " pod="openstack/dnsmasq-dns-87cd8867c-rcq8q" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.281142 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rqhn\" (UniqueName: \"kubernetes.io/projected/c57cacbb-bcb7-4282-a70c-b93a581cef16-kube-api-access-7rqhn\") pod \"dnsmasq-dns-87cd8867c-rcq8q\" (UID: \"c57cacbb-bcb7-4282-a70c-b93a581cef16\") " pod="openstack/dnsmasq-dns-87cd8867c-rcq8q" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.281205 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-dns-svc\") pod \"dnsmasq-dns-775cb64d69-k4mx5\" (UID: \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\") " pod="openstack/dnsmasq-dns-775cb64d69-k4mx5" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.281670 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qk7q9\" (UniqueName: \"kubernetes.io/projected/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-kube-api-access-qk7q9\") pod \"dnsmasq-dns-775cb64d69-k4mx5\" (UID: \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\") " pod="openstack/dnsmasq-dns-775cb64d69-k4mx5" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.384040 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-config\") pod \"dnsmasq-dns-775cb64d69-k4mx5\" (UID: \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\") " pod="openstack/dnsmasq-dns-775cb64d69-k4mx5" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.384096 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c57cacbb-bcb7-4282-a70c-b93a581cef16-config\") pod \"dnsmasq-dns-87cd8867c-rcq8q\" (UID: \"c57cacbb-bcb7-4282-a70c-b93a581cef16\") " pod="openstack/dnsmasq-dns-87cd8867c-rcq8q" Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.384123 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rqhn\" (UniqueName: \"kubernetes.io/projected/c57cacbb-bcb7-4282-a70c-b93a581cef16-kube-api-access-7rqhn\") pod \"dnsmasq-dns-87cd8867c-rcq8q\" (UID: \"c57cacbb-bcb7-4282-a70c-b93a581cef16\") " pod="openstack/dnsmasq-dns-87cd8867c-rcq8q" 
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.384494 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-dns-svc\") pod \"dnsmasq-dns-775cb64d69-k4mx5\" (UID: \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\") " pod="openstack/dnsmasq-dns-775cb64d69-k4mx5"
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.384531 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qk7q9\" (UniqueName: \"kubernetes.io/projected/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-kube-api-access-qk7q9\") pod \"dnsmasq-dns-775cb64d69-k4mx5\" (UID: \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\") " pod="openstack/dnsmasq-dns-775cb64d69-k4mx5"
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.384974 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-config\") pod \"dnsmasq-dns-775cb64d69-k4mx5\" (UID: \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\") " pod="openstack/dnsmasq-dns-775cb64d69-k4mx5"
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.384979 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c57cacbb-bcb7-4282-a70c-b93a581cef16-config\") pod \"dnsmasq-dns-87cd8867c-rcq8q\" (UID: \"c57cacbb-bcb7-4282-a70c-b93a581cef16\") " pod="openstack/dnsmasq-dns-87cd8867c-rcq8q"
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.385172 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-dns-svc\") pod \"dnsmasq-dns-775cb64d69-k4mx5\" (UID: \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\") " pod="openstack/dnsmasq-dns-775cb64d69-k4mx5"
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.406279 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qk7q9\" (UniqueName: \"kubernetes.io/projected/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-kube-api-access-qk7q9\") pod \"dnsmasq-dns-775cb64d69-k4mx5\" (UID: \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\") " pod="openstack/dnsmasq-dns-775cb64d69-k4mx5"
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.406537 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rqhn\" (UniqueName: \"kubernetes.io/projected/c57cacbb-bcb7-4282-a70c-b93a581cef16-kube-api-access-7rqhn\") pod \"dnsmasq-dns-87cd8867c-rcq8q\" (UID: \"c57cacbb-bcb7-4282-a70c-b93a581cef16\") " pod="openstack/dnsmasq-dns-87cd8867c-rcq8q"
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.455685 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-87cd8867c-rcq8q"
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.495655 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-775cb64d69-k4mx5"
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.676852 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-775cb64d69-k4mx5"]
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.705377 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bc7bd85-274wr"]
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.706665 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bc7bd85-274wr"
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.743196 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bc7bd85-274wr"]
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.897784 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/070a5d7e-873a-4749-bba6-76f3d406b0a2-config\") pod \"dnsmasq-dns-bc7bd85-274wr\" (UID: \"070a5d7e-873a-4749-bba6-76f3d406b0a2\") " pod="openstack/dnsmasq-dns-bc7bd85-274wr"
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.898142 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw5vf\" (UniqueName: \"kubernetes.io/projected/070a5d7e-873a-4749-bba6-76f3d406b0a2-kube-api-access-sw5vf\") pod \"dnsmasq-dns-bc7bd85-274wr\" (UID: \"070a5d7e-873a-4749-bba6-76f3d406b0a2\") " pod="openstack/dnsmasq-dns-bc7bd85-274wr"
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.898211 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/070a5d7e-873a-4749-bba6-76f3d406b0a2-dns-svc\") pod \"dnsmasq-dns-bc7bd85-274wr\" (UID: \"070a5d7e-873a-4749-bba6-76f3d406b0a2\") " pod="openstack/dnsmasq-dns-bc7bd85-274wr"
Sep 30 14:58:20 crc kubenswrapper[4783]: I0930 14:58:20.991085 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-87cd8867c-rcq8q"]
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:20.999619 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/070a5d7e-873a-4749-bba6-76f3d406b0a2-dns-svc\") pod \"dnsmasq-dns-bc7bd85-274wr\" (UID: \"070a5d7e-873a-4749-bba6-76f3d406b0a2\") " pod="openstack/dnsmasq-dns-bc7bd85-274wr"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:20.999723 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/070a5d7e-873a-4749-bba6-76f3d406b0a2-config\") pod \"dnsmasq-dns-bc7bd85-274wr\" (UID: \"070a5d7e-873a-4749-bba6-76f3d406b0a2\") " pod="openstack/dnsmasq-dns-bc7bd85-274wr"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:20.999814 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw5vf\" (UniqueName: \"kubernetes.io/projected/070a5d7e-873a-4749-bba6-76f3d406b0a2-kube-api-access-sw5vf\") pod \"dnsmasq-dns-bc7bd85-274wr\" (UID: \"070a5d7e-873a-4749-bba6-76f3d406b0a2\") " pod="openstack/dnsmasq-dns-bc7bd85-274wr"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.000540 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/070a5d7e-873a-4749-bba6-76f3d406b0a2-dns-svc\") pod \"dnsmasq-dns-bc7bd85-274wr\" (UID: \"070a5d7e-873a-4749-bba6-76f3d406b0a2\") " pod="openstack/dnsmasq-dns-bc7bd85-274wr"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.000733 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/070a5d7e-873a-4749-bba6-76f3d406b0a2-config\") pod \"dnsmasq-dns-bc7bd85-274wr\" (UID: \"070a5d7e-873a-4749-bba6-76f3d406b0a2\") " pod="openstack/dnsmasq-dns-bc7bd85-274wr"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.021710 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw5vf\" (UniqueName: \"kubernetes.io/projected/070a5d7e-873a-4749-bba6-76f3d406b0a2-kube-api-access-sw5vf\") pod \"dnsmasq-dns-bc7bd85-274wr\" (UID: \"070a5d7e-873a-4749-bba6-76f3d406b0a2\") " pod="openstack/dnsmasq-dns-bc7bd85-274wr"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.031623 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f455d6d69-qzwk6"]
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.033118 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.050290 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bc7bd85-274wr"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.051138 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f455d6d69-qzwk6"]
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.093982 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-775cb64d69-k4mx5"]
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.100880 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed77d292-efda-4cef-9290-e24de945ed95-dns-svc\") pod \"dnsmasq-dns-5f455d6d69-qzwk6\" (UID: \"ed77d292-efda-4cef-9290-e24de945ed95\") " pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.100919 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed77d292-efda-4cef-9290-e24de945ed95-config\") pod \"dnsmasq-dns-5f455d6d69-qzwk6\" (UID: \"ed77d292-efda-4cef-9290-e24de945ed95\") " pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.100996 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrdhg\" (UniqueName: \"kubernetes.io/projected/ed77d292-efda-4cef-9290-e24de945ed95-kube-api-access-mrdhg\") pod \"dnsmasq-dns-5f455d6d69-qzwk6\" (UID: \"ed77d292-efda-4cef-9290-e24de945ed95\") " pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.123445 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-87cd8867c-rcq8q"]
Sep 30 14:58:21 crc kubenswrapper[4783]: W0930 14:58:21.135918 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff552b6c_5d0b_4e05_9d04_2c076f8864bb.slice/crio-ad832ef29b67110419be5359a465980ee6880d6077045675ae1a3658737b0e55 WatchSource:0}: Error finding container ad832ef29b67110419be5359a465980ee6880d6077045675ae1a3658737b0e55: Status 404 returned error can't find the container with id ad832ef29b67110419be5359a465980ee6880d6077045675ae1a3658737b0e55
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.195138 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-87cd8867c-rcq8q" event={"ID":"c57cacbb-bcb7-4282-a70c-b93a581cef16","Type":"ContainerStarted","Data":"d9dab494c57c1e65e6c04e1c3e759d5a977b6e31edd656a38f584a9c26867b9e"}
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.202432 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrdhg\" (UniqueName: \"kubernetes.io/projected/ed77d292-efda-4cef-9290-e24de945ed95-kube-api-access-mrdhg\") pod \"dnsmasq-dns-5f455d6d69-qzwk6\" (UID: \"ed77d292-efda-4cef-9290-e24de945ed95\") " pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.202501 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed77d292-efda-4cef-9290-e24de945ed95-dns-svc\") pod \"dnsmasq-dns-5f455d6d69-qzwk6\" (UID: \"ed77d292-efda-4cef-9290-e24de945ed95\") " pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.202529 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed77d292-efda-4cef-9290-e24de945ed95-config\") pod \"dnsmasq-dns-5f455d6d69-qzwk6\" (UID: \"ed77d292-efda-4cef-9290-e24de945ed95\") " pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.202677 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-775cb64d69-k4mx5" event={"ID":"ff552b6c-5d0b-4e05-9d04-2c076f8864bb","Type":"ContainerStarted","Data":"ad832ef29b67110419be5359a465980ee6880d6077045675ae1a3658737b0e55"}
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.203497 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed77d292-efda-4cef-9290-e24de945ed95-config\") pod \"dnsmasq-dns-5f455d6d69-qzwk6\" (UID: \"ed77d292-efda-4cef-9290-e24de945ed95\") " pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.205454 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed77d292-efda-4cef-9290-e24de945ed95-dns-svc\") pod \"dnsmasq-dns-5f455d6d69-qzwk6\" (UID: \"ed77d292-efda-4cef-9290-e24de945ed95\") " pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.220757 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrdhg\" (UniqueName: \"kubernetes.io/projected/ed77d292-efda-4cef-9290-e24de945ed95-kube-api-access-mrdhg\") pod \"dnsmasq-dns-5f455d6d69-qzwk6\" (UID: \"ed77d292-efda-4cef-9290-e24de945ed95\") " pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.349605 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.418351 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bc7bd85-274wr"]
Sep 30 14:58:21 crc kubenswrapper[4783]: W0930 14:58:21.443932 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod070a5d7e_873a_4749_bba6_76f3d406b0a2.slice/crio-69e3ae831a4b7dee4b5a2514029bdeb9ee646cc5f961810b40cf7c2b3c0f9182 WatchSource:0}: Error finding container 69e3ae831a4b7dee4b5a2514029bdeb9ee646cc5f961810b40cf7c2b3c0f9182: Status 404 returned error can't find the container with id 69e3ae831a4b7dee4b5a2514029bdeb9ee646cc5f961810b40cf7c2b3c0f9182
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.837755 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f455d6d69-qzwk6"]
Sep 30 14:58:21 crc kubenswrapper[4783]: W0930 14:58:21.867174 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded77d292_efda_4cef_9290_e24de945ed95.slice/crio-d0e6a48b57e8f905556a76f4e9ab54888cc9bd959f601be3c63f220e861870e1 WatchSource:0}: Error finding container d0e6a48b57e8f905556a76f4e9ab54888cc9bd959f601be3c63f220e861870e1: Status 404 returned error can't find the container with id d0e6a48b57e8f905556a76f4e9ab54888cc9bd959f601be3c63f220e861870e1
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.873972 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.875464 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.878313 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.878651 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-wdvqz"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.878728 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.878800 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.878875 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.878911 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.879025 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Sep 30 14:58:21 crc kubenswrapper[4783]: I0930 14:58:21.896833 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.023946 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.024027 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-config-data\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.024082 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.024142 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4ebf1c70-b736-461b-9f07-449542b8c622-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.024167 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr5px\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-kube-api-access-rr5px\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.024203 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4ebf1c70-b736-461b-9f07-449542b8c622-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.024251 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.024333 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.024428 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.024524 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.024675 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.126393 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.126474 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.126513 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.126564 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.126586 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-config-data\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.126811 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.126839 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4ebf1c70-b736-461b-9f07-449542b8c622-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.126869 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr5px\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-kube-api-access-rr5px\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.126886 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4ebf1c70-b736-461b-9f07-449542b8c622-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.126901 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.126943 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.127604 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.127676 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.128486 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.128719 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-config-data\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.128923 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.130700 4783 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.130742 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6e27c1000c8130b4873ed2092beb58938e245e536766a028cc255f01ae931b18/globalmount\"" pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.132109 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4ebf1c70-b736-461b-9f07-449542b8c622-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.132570 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4ebf1c70-b736-461b-9f07-449542b8c622-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.133818 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.133922 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.144200 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr5px\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-kube-api-access-rr5px\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.159466 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.161121 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.163911 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.164698 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.164997 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.165352 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.165462 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.165520 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-qc2cw"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.165629 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.172906 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\") pod \"rabbitmq-server-0\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.215884 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.220183 4783 generic.go:334] "Generic (PLEG): container finished" podID="070a5d7e-873a-4749-bba6-76f3d406b0a2" containerID="ce2493f1abbc941e019a3fceac998a1611c2cbbf450109f0a5e6ab35d1987c51" exitCode=0
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.220307 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc7bd85-274wr" event={"ID":"070a5d7e-873a-4749-bba6-76f3d406b0a2","Type":"ContainerDied","Data":"ce2493f1abbc941e019a3fceac998a1611c2cbbf450109f0a5e6ab35d1987c51"}
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.220337 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc7bd85-274wr" event={"ID":"070a5d7e-873a-4749-bba6-76f3d406b0a2","Type":"ContainerStarted","Data":"69e3ae831a4b7dee4b5a2514029bdeb9ee646cc5f961810b40cf7c2b3c0f9182"}
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.221580 4783 generic.go:334] "Generic (PLEG): container finished" podID="c57cacbb-bcb7-4282-a70c-b93a581cef16" containerID="f88937948675604db0cc46ed4a0121ebc8f99977535a9b86ddd8f93136f26361" exitCode=0
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.221632 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-87cd8867c-rcq8q" event={"ID":"c57cacbb-bcb7-4282-a70c-b93a581cef16","Type":"ContainerDied","Data":"f88937948675604db0cc46ed4a0121ebc8f99977535a9b86ddd8f93136f26361"}
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.225081 4783 generic.go:334] "Generic (PLEG): container finished" podID="ed77d292-efda-4cef-9290-e24de945ed95" containerID="1c6938fb7583aaf473f86ee780000367c78a67df1e1ae59174882fb29ffe3b6a" exitCode=0
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.225155 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6" event={"ID":"ed77d292-efda-4cef-9290-e24de945ed95","Type":"ContainerDied","Data":"1c6938fb7583aaf473f86ee780000367c78a67df1e1ae59174882fb29ffe3b6a"}
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.225184 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6" event={"ID":"ed77d292-efda-4cef-9290-e24de945ed95","Type":"ContainerStarted","Data":"d0e6a48b57e8f905556a76f4e9ab54888cc9bd959f601be3c63f220e861870e1"}
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.227322 4783 generic.go:334] "Generic (PLEG): container finished" podID="ff552b6c-5d0b-4e05-9d04-2c076f8864bb" containerID="d21901fa8e36b83766e6db841a17781aee69492fc4b9b96db295ffeab16326e0" exitCode=0
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.227359 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-775cb64d69-k4mx5" event={"ID":"ff552b6c-5d0b-4e05-9d04-2c076f8864bb","Type":"ContainerDied","Data":"d21901fa8e36b83766e6db841a17781aee69492fc4b9b96db295ffeab16326e0"}
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.255731 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.332374 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.332423 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.332460 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/59b335c6-05a6-4996-8412-8d06804cb213-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.332474 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qh2hx\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-kube-api-access-qh2hx\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.332499 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.336145 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume
\"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.336201 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/59b335c6-05a6-4996-8412-8d06804cb213-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.336241 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.336307 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.336350 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.336385 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.439644 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.439699 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/59b335c6-05a6-4996-8412-8d06804cb213-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.439724 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.439757 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.439784 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.439807 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.439895 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.439920 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.439951 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/59b335c6-05a6-4996-8412-8d06804cb213-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.439974 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qh2hx\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-kube-api-access-qh2hx\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.439998 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.441306 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.442106 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-config-data\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.446723 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.447643 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.447893 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.464867 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/59b335c6-05a6-4996-8412-8d06804cb213-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.465813 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.467484 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/59b335c6-05a6-4996-8412-8d06804cb213-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.467540 4783 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.467849 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2337f5b83f2849bc2e06ff73920d7ab842ea8dc6ef7d4167ca212a6758a9a8d4/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.473825 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.475013 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qh2hx\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-kube-api-access-qh2hx\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.522524 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\") pod \"rabbitmq-cell1-server-0\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.577304 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-87cd8867c-rcq8q" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.694542 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-775cb64d69-k4mx5" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.750329 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rqhn\" (UniqueName: \"kubernetes.io/projected/c57cacbb-bcb7-4282-a70c-b93a581cef16-kube-api-access-7rqhn\") pod \"c57cacbb-bcb7-4282-a70c-b93a581cef16\" (UID: \"c57cacbb-bcb7-4282-a70c-b93a581cef16\") " Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.750451 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c57cacbb-bcb7-4282-a70c-b93a581cef16-config\") pod \"c57cacbb-bcb7-4282-a70c-b93a581cef16\" (UID: \"c57cacbb-bcb7-4282-a70c-b93a581cef16\") " Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.754545 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c57cacbb-bcb7-4282-a70c-b93a581cef16-kube-api-access-7rqhn" (OuterVolumeSpecName: "kube-api-access-7rqhn") pod "c57cacbb-bcb7-4282-a70c-b93a581cef16" (UID: "c57cacbb-bcb7-4282-a70c-b93a581cef16"). InnerVolumeSpecName "kube-api-access-7rqhn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.775700 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c57cacbb-bcb7-4282-a70c-b93a581cef16-config" (OuterVolumeSpecName: "config") pod "c57cacbb-bcb7-4282-a70c-b93a581cef16" (UID: "c57cacbb-bcb7-4282-a70c-b93a581cef16"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.821860 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.851659 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qk7q9\" (UniqueName: \"kubernetes.io/projected/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-kube-api-access-qk7q9\") pod \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\" (UID: \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\") " Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.851703 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-config\") pod \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\" (UID: \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\") " Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.851750 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-dns-svc\") pod \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\" (UID: \"ff552b6c-5d0b-4e05-9d04-2c076f8864bb\") " Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.852063 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c57cacbb-bcb7-4282-a70c-b93a581cef16-config\") on node \"crc\" DevicePath \"\"" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.852090 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rqhn\" (UniqueName: \"kubernetes.io/projected/c57cacbb-bcb7-4282-a70c-b93a581cef16-kube-api-access-7rqhn\") on node \"crc\" DevicePath \"\"" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.857979 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-kube-api-access-qk7q9" (OuterVolumeSpecName: "kube-api-access-qk7q9") pod "ff552b6c-5d0b-4e05-9d04-2c076f8864bb" (UID: "ff552b6c-5d0b-4e05-9d04-2c076f8864bb"). InnerVolumeSpecName "kube-api-access-qk7q9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.874384 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.876176 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-config" (OuterVolumeSpecName: "config") pod "ff552b6c-5d0b-4e05-9d04-2c076f8864bb" (UID: "ff552b6c-5d0b-4e05-9d04-2c076f8864bb"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:58:22 crc kubenswrapper[4783]: W0930 14:58:22.878516 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4ebf1c70_b736_461b_9f07_449542b8c622.slice/crio-1173d623ca7ff5d2b08a5dc1f35ac4c24a9d7ef6b38cb40b7d4009a05526b3e5 WatchSource:0}: Error finding container 1173d623ca7ff5d2b08a5dc1f35ac4c24a9d7ef6b38cb40b7d4009a05526b3e5: Status 404 returned error can't find the container with id 1173d623ca7ff5d2b08a5dc1f35ac4c24a9d7ef6b38cb40b7d4009a05526b3e5 Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.881115 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ff552b6c-5d0b-4e05-9d04-2c076f8864bb" (UID: "ff552b6c-5d0b-4e05-9d04-2c076f8864bb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.954101 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qk7q9\" (UniqueName: \"kubernetes.io/projected/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-kube-api-access-qk7q9\") on node \"crc\" DevicePath \"\"" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.954480 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-config\") on node \"crc\" DevicePath \"\"" Sep 30 14:58:22 crc kubenswrapper[4783]: I0930 14:58:22.954496 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff552b6c-5d0b-4e05-9d04-2c076f8864bb-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.035140 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 30 14:58:23 crc kubenswrapper[4783]: E0930 14:58:23.035472 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff552b6c-5d0b-4e05-9d04-2c076f8864bb" containerName="init" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.035489 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff552b6c-5d0b-4e05-9d04-2c076f8864bb" containerName="init" Sep 30 14:58:23 crc kubenswrapper[4783]: E0930 14:58:23.035512 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c57cacbb-bcb7-4282-a70c-b93a581cef16" containerName="init" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.035520 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="c57cacbb-bcb7-4282-a70c-b93a581cef16" containerName="init" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.035655 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="c57cacbb-bcb7-4282-a70c-b93a581cef16" containerName="init" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.035669 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff552b6c-5d0b-4e05-9d04-2c076f8864bb" containerName="init" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.036427 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.045697 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.046029 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.046146 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-nt7kw" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.053165 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.053416 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.060177 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.063459 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.156961 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/95cad87b-982f-424b-a758-5058c04ea9db-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.157011 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9llf\" (UniqueName: \"kubernetes.io/projected/95cad87b-982f-424b-a758-5058c04ea9db-kube-api-access-t9llf\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.157059 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/95cad87b-982f-424b-a758-5058c04ea9db-secrets\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.157087 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/95cad87b-982f-424b-a758-5058c04ea9db-kolla-config\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.157113 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/95cad87b-982f-424b-a758-5058c04ea9db-config-data-default\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.157156 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-dbfee86f-ef40-497d-a722-e0707be6fe2c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dbfee86f-ef40-497d-a722-e0707be6fe2c\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " 
pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.157177 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95cad87b-982f-424b-a758-5058c04ea9db-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.157203 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95cad87b-982f-424b-a758-5058c04ea9db-operator-scripts\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.157248 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/95cad87b-982f-424b-a758-5058c04ea9db-config-data-generated\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.241039 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6" event={"ID":"ed77d292-efda-4cef-9290-e24de945ed95","Type":"ContainerStarted","Data":"362b355a9e4656ee6b8985689d034ebc462c4d7bc90ca51007367b4b344f9123"} Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.241538 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.244437 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4ebf1c70-b736-461b-9f07-449542b8c622","Type":"ContainerStarted","Data":"6530636e6ee9bb003af77089ce7690fa85e5bdd8573d65c96bae4f1c38384aac"} Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.244471 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4ebf1c70-b736-461b-9f07-449542b8c622","Type":"ContainerStarted","Data":"1173d623ca7ff5d2b08a5dc1f35ac4c24a9d7ef6b38cb40b7d4009a05526b3e5"} Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.246604 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-775cb64d69-k4mx5" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.246622 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-775cb64d69-k4mx5" event={"ID":"ff552b6c-5d0b-4e05-9d04-2c076f8864bb","Type":"ContainerDied","Data":"ad832ef29b67110419be5359a465980ee6880d6077045675ae1a3658737b0e55"} Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.246683 4783 scope.go:117] "RemoveContainer" containerID="d21901fa8e36b83766e6db841a17781aee69492fc4b9b96db295ffeab16326e0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.248536 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc7bd85-274wr" event={"ID":"070a5d7e-873a-4749-bba6-76f3d406b0a2","Type":"ContainerStarted","Data":"fd54d0ea9f862234a482ab675fe90d8aef52d1b217369d48a5537547e4310e92"} Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.251768 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bc7bd85-274wr" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.265944 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/95cad87b-982f-424b-a758-5058c04ea9db-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.266044 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9llf\" (UniqueName: \"kubernetes.io/projected/95cad87b-982f-424b-a758-5058c04ea9db-kube-api-access-t9llf\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.266117 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/95cad87b-982f-424b-a758-5058c04ea9db-secrets\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.266196 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/95cad87b-982f-424b-a758-5058c04ea9db-kolla-config\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.266264 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/95cad87b-982f-424b-a758-5058c04ea9db-config-data-default\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.266334 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-dbfee86f-ef40-497d-a722-e0707be6fe2c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dbfee86f-ef40-497d-a722-e0707be6fe2c\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.266360 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/95cad87b-982f-424b-a758-5058c04ea9db-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.266456 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95cad87b-982f-424b-a758-5058c04ea9db-operator-scripts\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.266527 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/95cad87b-982f-424b-a758-5058c04ea9db-config-data-generated\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.267005 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/95cad87b-982f-424b-a758-5058c04ea9db-config-data-generated\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.268958 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6" podStartSLOduration=2.268946118 podStartE2EDuration="2.268946118s" podCreationTimestamp="2025-09-30 14:58:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 14:58:23.268287007 +0000 UTC m=+5003.199753334" watchObservedRunningTime="2025-09-30 14:58:23.268946118 +0000 UTC m=+5003.200412425" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.269931 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/95cad87b-982f-424b-a758-5058c04ea9db-kolla-config\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.270568 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/95cad87b-982f-424b-a758-5058c04ea9db-config-data-default\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.275996 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95cad87b-982f-424b-a758-5058c04ea9db-operator-scripts\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.276126 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/95cad87b-982f-424b-a758-5058c04ea9db-secrets\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.276254 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/95cad87b-982f-424b-a758-5058c04ea9db-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.276806 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-87cd8867c-rcq8q" event={"ID":"c57cacbb-bcb7-4282-a70c-b93a581cef16","Type":"ContainerDied","Data":"d9dab494c57c1e65e6c04e1c3e759d5a977b6e31edd656a38f584a9c26867b9e"} Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.276927 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-87cd8867c-rcq8q" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.281950 4783 scope.go:117] "RemoveContainer" containerID="f88937948675604db0cc46ed4a0121ebc8f99977535a9b86ddd8f93136f26361" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.285457 4783 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.285498 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-dbfee86f-ef40-497d-a722-e0707be6fe2c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dbfee86f-ef40-497d-a722-e0707be6fe2c\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/be12e944bdb8ac7df6949f40e6b36093a2eb217c925498e6e7f7cf871b54dfee/globalmount\"" pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.291385 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/95cad87b-982f-424b-a758-5058c04ea9db-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.292803 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9llf\" (UniqueName: \"kubernetes.io/projected/95cad87b-982f-424b-a758-5058c04ea9db-kube-api-access-t9llf\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.322649 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.339332 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bc7bd85-274wr" podStartSLOduration=3.339311051 podStartE2EDuration="3.339311051s" podCreationTimestamp="2025-09-30 14:58:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 14:58:23.331411928 +0000 UTC m=+5003.262878255" watchObservedRunningTime="2025-09-30 14:58:23.339311051 +0000 UTC m=+5003.270777488" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.376035 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-87cd8867c-rcq8q"] Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.382100 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-87cd8867c-rcq8q"] Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.387024 4783 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"pvc-dbfee86f-ef40-497d-a722-e0707be6fe2c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dbfee86f-ef40-497d-a722-e0707be6fe2c\") pod \"openstack-galera-0\" (UID: \"95cad87b-982f-424b-a758-5058c04ea9db\") " pod="openstack/openstack-galera-0" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.424890 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-775cb64d69-k4mx5"] Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.477423 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-775cb64d69-k4mx5"] Sep 30 14:58:23 crc kubenswrapper[4783]: E0930 14:58:23.515256 4783 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff552b6c_5d0b_4e05_9d04_2c076f8864bb.slice\": RecentStats: unable to find data in memory cache]" Sep 30 14:58:23 crc kubenswrapper[4783]: I0930 14:58:23.658875 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.089644 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.284934 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"95cad87b-982f-424b-a758-5058c04ea9db","Type":"ContainerStarted","Data":"09ae9409afed63d86450daa0552196dc7b81172275da444f2706b76dc8b636de"} Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.285287 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"95cad87b-982f-424b-a758-5058c04ea9db","Type":"ContainerStarted","Data":"a5e3dfe28ae1677aef5e71b7f1ff43db9f969a68e1e656045bbfe5ce5b7bf691"} Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.301144 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"59b335c6-05a6-4996-8412-8d06804cb213","Type":"ContainerStarted","Data":"019ac923c775f4697cdbaa20ddd0d7b81a2db92605def2c68a3c026a83ea61b6"} Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.301194 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"59b335c6-05a6-4996-8412-8d06804cb213","Type":"ContainerStarted","Data":"0fa8fd85d9edc181a24939e97ed582dac90fa570ead01a8cdb2b611bba938134"} Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.552929 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.554157 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.557171 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-7jndt" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.557182 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.557749 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.558008 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.570491 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.653031 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.654347 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.656257 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.658644 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-tv7r6" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.660955 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.664217 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.700342 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6mcz\" (UniqueName: \"kubernetes.io/projected/fa616c4a-606e-4d41-968e-9e9f6b288556-kube-api-access-f6mcz\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.700388 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/fa616c4a-606e-4d41-968e-9e9f6b288556-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.700424 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa616c4a-606e-4d41-968e-9e9f6b288556-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.700452 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa616c4a-606e-4d41-968e-9e9f6b288556-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 
14:58:24.700472 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa616c4a-606e-4d41-968e-9e9f6b288556-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.700515 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c64a9b7a-3a58-4694-84f8-0805e7393b9a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c64a9b7a-3a58-4694-84f8-0805e7393b9a\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.700558 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa616c4a-606e-4d41-968e-9e9f6b288556-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.700581 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fa616c4a-606e-4d41-968e-9e9f6b288556-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.700604 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fa616c4a-606e-4d41-968e-9e9f6b288556-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.802598 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d51eac18-20ce-4903-ba61-35e4df01c2f5-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.802654 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fa616c4a-606e-4d41-968e-9e9f6b288556-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.802688 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6mcz\" (UniqueName: \"kubernetes.io/projected/fa616c4a-606e-4d41-968e-9e9f6b288556-kube-api-access-f6mcz\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.802718 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/fa616c4a-606e-4d41-968e-9e9f6b288556-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 
30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.802743 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d51eac18-20ce-4903-ba61-35e4df01c2f5-config-data\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.802781 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa616c4a-606e-4d41-968e-9e9f6b288556-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.802816 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa616c4a-606e-4d41-968e-9e9f6b288556-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.802847 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa616c4a-606e-4d41-968e-9e9f6b288556-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.802877 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d51eac18-20ce-4903-ba61-35e4df01c2f5-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.802906 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhf89\" (UniqueName: \"kubernetes.io/projected/d51eac18-20ce-4903-ba61-35e4df01c2f5-kube-api-access-fhf89\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.802924 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c64a9b7a-3a58-4694-84f8-0805e7393b9a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c64a9b7a-3a58-4694-84f8-0805e7393b9a\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.802965 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d51eac18-20ce-4903-ba61-35e4df01c2f5-kolla-config\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.802984 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa616c4a-606e-4d41-968e-9e9f6b288556-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.803009 4783 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fa616c4a-606e-4d41-968e-9e9f6b288556-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.803463 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fa616c4a-606e-4d41-968e-9e9f6b288556-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.804319 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fa616c4a-606e-4d41-968e-9e9f6b288556-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.807723 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa616c4a-606e-4d41-968e-9e9f6b288556-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.808280 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa616c4a-606e-4d41-968e-9e9f6b288556-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.808805 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa616c4a-606e-4d41-968e-9e9f6b288556-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.809060 4783 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.809085 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c64a9b7a-3a58-4694-84f8-0805e7393b9a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c64a9b7a-3a58-4694-84f8-0805e7393b9a\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/76e394db2a7eb41f0d2013cfd0dee53230c48693d6f10a0aecab4a3e783aa024/globalmount\"" pod="openstack/openstack-cell1-galera-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.810413 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa616c4a-606e-4d41-968e-9e9f6b288556-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.827972 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/fa616c4a-606e-4d41-968e-9e9f6b288556-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.832903 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6mcz\" (UniqueName: \"kubernetes.io/projected/fa616c4a-606e-4d41-968e-9e9f6b288556-kube-api-access-f6mcz\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.849961 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c64a9b7a-3a58-4694-84f8-0805e7393b9a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c64a9b7a-3a58-4694-84f8-0805e7393b9a\") pod \"openstack-cell1-galera-0\" (UID: \"fa616c4a-606e-4d41-968e-9e9f6b288556\") " pod="openstack/openstack-cell1-galera-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.860732 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c57cacbb-bcb7-4282-a70c-b93a581cef16" path="/var/lib/kubelet/pods/c57cacbb-bcb7-4282-a70c-b93a581cef16/volumes"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.861486 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff552b6c-5d0b-4e05-9d04-2c076f8864bb" path="/var/lib/kubelet/pods/ff552b6c-5d0b-4e05-9d04-2c076f8864bb/volumes"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.875937 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.904060 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d51eac18-20ce-4903-ba61-35e4df01c2f5-kolla-config\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.904144 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d51eac18-20ce-4903-ba61-35e4df01c2f5-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.904186 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d51eac18-20ce-4903-ba61-35e4df01c2f5-config-data\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.904291 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d51eac18-20ce-4903-ba61-35e4df01c2f5-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.904325 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhf89\" (UniqueName: \"kubernetes.io/projected/d51eac18-20ce-4903-ba61-35e4df01c2f5-kube-api-access-fhf89\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.906723 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d51eac18-20ce-4903-ba61-35e4df01c2f5-config-data\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.910504 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d51eac18-20ce-4903-ba61-35e4df01c2f5-kolla-config\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.911970 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d51eac18-20ce-4903-ba61-35e4df01c2f5-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.914642 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d51eac18-20ce-4903-ba61-35e4df01c2f5-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.925521 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhf89\" (UniqueName: \"kubernetes.io/projected/d51eac18-20ce-4903-ba61-35e4df01c2f5-kube-api-access-fhf89\") pod \"memcached-0\" (UID: \"d51eac18-20ce-4903-ba61-35e4df01c2f5\") " pod="openstack/memcached-0"
Sep 30 14:58:24 crc kubenswrapper[4783]: I0930 14:58:24.982526 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Sep 30 14:58:25 crc kubenswrapper[4783]: I0930 14:58:25.203919 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Sep 30 14:58:25 crc kubenswrapper[4783]: I0930 14:58:25.324626 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Sep 30 14:58:25 crc kubenswrapper[4783]: I0930 14:58:25.327850 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d51eac18-20ce-4903-ba61-35e4df01c2f5","Type":"ContainerStarted","Data":"be01362cddaaaeba64af8821d06063f3b6c89bcbcd26ebf2e87159a7010905da"}
Sep 30 14:58:26 crc kubenswrapper[4783]: I0930 14:58:26.339173 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d51eac18-20ce-4903-ba61-35e4df01c2f5","Type":"ContainerStarted","Data":"b5b69b3ac1dd1b855dd29ec63d59d3bfc4d29f764e53b6474154655c82324a99"}
Sep 30 14:58:26 crc kubenswrapper[4783]: I0930 14:58:26.339534 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0"
Sep 30 14:58:26 crc kubenswrapper[4783]: I0930 14:58:26.340801 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fa616c4a-606e-4d41-968e-9e9f6b288556","Type":"ContainerStarted","Data":"188c08daaa18e81eca6d5f5e5fbc38873dd73fcaee92efae778d831e583e912d"}
Sep 30 14:58:26 crc kubenswrapper[4783]: I0930 14:58:26.340837 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fa616c4a-606e-4d41-968e-9e9f6b288556","Type":"ContainerStarted","Data":"232e39e8369bb4c7bc4113050a3de208a85c04ca14f2e11038b45b649bdc00c0"}
Sep 30 14:58:26 crc kubenswrapper[4783]: I0930 14:58:26.370918 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.370896741 podStartE2EDuration="2.370896741s" podCreationTimestamp="2025-09-30 14:58:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 14:58:26.366117538 +0000 UTC m=+5006.297583845" watchObservedRunningTime="2025-09-30 14:58:26.370896741 +0000 UTC m=+5006.302363048"
Sep 30 14:58:28 crc kubenswrapper[4783]: I0930 14:58:28.355357 4783 generic.go:334] "Generic (PLEG): container finished" podID="95cad87b-982f-424b-a758-5058c04ea9db" containerID="09ae9409afed63d86450daa0552196dc7b81172275da444f2706b76dc8b636de" exitCode=0
Sep 30 14:58:28 crc kubenswrapper[4783]: I0930 14:58:28.355468 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"95cad87b-982f-424b-a758-5058c04ea9db","Type":"ContainerDied","Data":"09ae9409afed63d86450daa0552196dc7b81172275da444f2706b76dc8b636de"}
Sep 30 14:58:29 crc kubenswrapper[4783]: I0930 14:58:29.364195 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"95cad87b-982f-424b-a758-5058c04ea9db","Type":"ContainerStarted","Data":"baba12e4db8ddadc178751b94f4cd25c38b5576bdd87c426ca73d3f0202e0930"}
Sep 30 14:58:29 crc kubenswrapper[4783]: I0930 14:58:29.366185 4783 generic.go:334] "Generic (PLEG): container finished" podID="fa616c4a-606e-4d41-968e-9e9f6b288556"
containerID="188c08daaa18e81eca6d5f5e5fbc38873dd73fcaee92efae778d831e583e912d" exitCode=0 Sep 30 14:58:29 crc kubenswrapper[4783]: I0930 14:58:29.366245 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fa616c4a-606e-4d41-968e-9e9f6b288556","Type":"ContainerDied","Data":"188c08daaa18e81eca6d5f5e5fbc38873dd73fcaee92efae778d831e583e912d"} Sep 30 14:58:29 crc kubenswrapper[4783]: I0930 14:58:29.407788 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=7.407649718 podStartE2EDuration="7.407649718s" podCreationTimestamp="2025-09-30 14:58:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 14:58:29.402413669 +0000 UTC m=+5009.333879976" watchObservedRunningTime="2025-09-30 14:58:29.407649718 +0000 UTC m=+5009.339116035" Sep 30 14:58:30 crc kubenswrapper[4783]: I0930 14:58:30.377905 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fa616c4a-606e-4d41-968e-9e9f6b288556","Type":"ContainerStarted","Data":"b0bbdfca47186b85d86a516945175e9ea387b526c4920c08fc569b5952192789"} Sep 30 14:58:30 crc kubenswrapper[4783]: I0930 14:58:30.399084 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.399057018 podStartE2EDuration="7.399057018s" podCreationTimestamp="2025-09-30 14:58:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 14:58:30.398068437 +0000 UTC m=+5010.329534774" watchObservedRunningTime="2025-09-30 14:58:30.399057018 +0000 UTC m=+5010.330523325" Sep 30 14:58:31 crc kubenswrapper[4783]: I0930 14:58:31.052202 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bc7bd85-274wr" Sep 30 14:58:31 crc kubenswrapper[4783]: I0930 14:58:31.351334 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6" Sep 30 14:58:31 crc kubenswrapper[4783]: I0930 14:58:31.400676 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bc7bd85-274wr"] Sep 30 14:58:31 crc kubenswrapper[4783]: I0930 14:58:31.400964 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bc7bd85-274wr" podUID="070a5d7e-873a-4749-bba6-76f3d406b0a2" containerName="dnsmasq-dns" containerID="cri-o://fd54d0ea9f862234a482ab675fe90d8aef52d1b217369d48a5537547e4310e92" gracePeriod=10 Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.009865 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bc7bd85-274wr" Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.118090 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/070a5d7e-873a-4749-bba6-76f3d406b0a2-config\") pod \"070a5d7e-873a-4749-bba6-76f3d406b0a2\" (UID: \"070a5d7e-873a-4749-bba6-76f3d406b0a2\") " Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.118166 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/070a5d7e-873a-4749-bba6-76f3d406b0a2-dns-svc\") pod \"070a5d7e-873a-4749-bba6-76f3d406b0a2\" (UID: \"070a5d7e-873a-4749-bba6-76f3d406b0a2\") " Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.118249 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw5vf\" (UniqueName: \"kubernetes.io/projected/070a5d7e-873a-4749-bba6-76f3d406b0a2-kube-api-access-sw5vf\") pod \"070a5d7e-873a-4749-bba6-76f3d406b0a2\" (UID: \"070a5d7e-873a-4749-bba6-76f3d406b0a2\") " Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.124502 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/070a5d7e-873a-4749-bba6-76f3d406b0a2-kube-api-access-sw5vf" (OuterVolumeSpecName: "kube-api-access-sw5vf") pod "070a5d7e-873a-4749-bba6-76f3d406b0a2" (UID: "070a5d7e-873a-4749-bba6-76f3d406b0a2"). InnerVolumeSpecName "kube-api-access-sw5vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.157652 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/070a5d7e-873a-4749-bba6-76f3d406b0a2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "070a5d7e-873a-4749-bba6-76f3d406b0a2" (UID: "070a5d7e-873a-4749-bba6-76f3d406b0a2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.160653 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/070a5d7e-873a-4749-bba6-76f3d406b0a2-config" (OuterVolumeSpecName: "config") pod "070a5d7e-873a-4749-bba6-76f3d406b0a2" (UID: "070a5d7e-873a-4749-bba6-76f3d406b0a2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.220015 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw5vf\" (UniqueName: \"kubernetes.io/projected/070a5d7e-873a-4749-bba6-76f3d406b0a2-kube-api-access-sw5vf\") on node \"crc\" DevicePath \"\"" Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.220050 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/070a5d7e-873a-4749-bba6-76f3d406b0a2-config\") on node \"crc\" DevicePath \"\"" Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.220060 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/070a5d7e-873a-4749-bba6-76f3d406b0a2-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.394829 4783 generic.go:334] "Generic (PLEG): container finished" podID="070a5d7e-873a-4749-bba6-76f3d406b0a2" containerID="fd54d0ea9f862234a482ab675fe90d8aef52d1b217369d48a5537547e4310e92" exitCode=0 Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.394920 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc7bd85-274wr" event={"ID":"070a5d7e-873a-4749-bba6-76f3d406b0a2","Type":"ContainerDied","Data":"fd54d0ea9f862234a482ab675fe90d8aef52d1b217369d48a5537547e4310e92"} Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.394924 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bc7bd85-274wr" Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.394989 4783 scope.go:117] "RemoveContainer" containerID="fd54d0ea9f862234a482ab675fe90d8aef52d1b217369d48a5537547e4310e92" Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.394969 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc7bd85-274wr" event={"ID":"070a5d7e-873a-4749-bba6-76f3d406b0a2","Type":"ContainerDied","Data":"69e3ae831a4b7dee4b5a2514029bdeb9ee646cc5f961810b40cf7c2b3c0f9182"} Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.416759 4783 scope.go:117] "RemoveContainer" containerID="ce2493f1abbc941e019a3fceac998a1611c2cbbf450109f0a5e6ab35d1987c51" Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.434039 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bc7bd85-274wr"] Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.442110 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bc7bd85-274wr"] Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.453068 4783 scope.go:117] "RemoveContainer" containerID="fd54d0ea9f862234a482ab675fe90d8aef52d1b217369d48a5537547e4310e92" Sep 30 14:58:32 crc kubenswrapper[4783]: E0930 14:58:32.453466 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd54d0ea9f862234a482ab675fe90d8aef52d1b217369d48a5537547e4310e92\": container with ID starting with fd54d0ea9f862234a482ab675fe90d8aef52d1b217369d48a5537547e4310e92 not found: ID does not exist" containerID="fd54d0ea9f862234a482ab675fe90d8aef52d1b217369d48a5537547e4310e92" Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.453496 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd54d0ea9f862234a482ab675fe90d8aef52d1b217369d48a5537547e4310e92"} err="failed to get container status 
\"fd54d0ea9f862234a482ab675fe90d8aef52d1b217369d48a5537547e4310e92\": rpc error: code = NotFound desc = could not find container \"fd54d0ea9f862234a482ab675fe90d8aef52d1b217369d48a5537547e4310e92\": container with ID starting with fd54d0ea9f862234a482ab675fe90d8aef52d1b217369d48a5537547e4310e92 not found: ID does not exist" Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.453516 4783 scope.go:117] "RemoveContainer" containerID="ce2493f1abbc941e019a3fceac998a1611c2cbbf450109f0a5e6ab35d1987c51" Sep 30 14:58:32 crc kubenswrapper[4783]: E0930 14:58:32.453983 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce2493f1abbc941e019a3fceac998a1611c2cbbf450109f0a5e6ab35d1987c51\": container with ID starting with ce2493f1abbc941e019a3fceac998a1611c2cbbf450109f0a5e6ab35d1987c51 not found: ID does not exist" containerID="ce2493f1abbc941e019a3fceac998a1611c2cbbf450109f0a5e6ab35d1987c51" Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.454001 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce2493f1abbc941e019a3fceac998a1611c2cbbf450109f0a5e6ab35d1987c51"} err="failed to get container status \"ce2493f1abbc941e019a3fceac998a1611c2cbbf450109f0a5e6ab35d1987c51\": rpc error: code = NotFound desc = could not find container \"ce2493f1abbc941e019a3fceac998a1611c2cbbf450109f0a5e6ab35d1987c51\": container with ID starting with ce2493f1abbc941e019a3fceac998a1611c2cbbf450109f0a5e6ab35d1987c51 not found: ID does not exist" Sep 30 14:58:32 crc kubenswrapper[4783]: I0930 14:58:32.859694 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="070a5d7e-873a-4749-bba6-76f3d406b0a2" path="/var/lib/kubelet/pods/070a5d7e-873a-4749-bba6-76f3d406b0a2/volumes" Sep 30 14:58:33 crc kubenswrapper[4783]: E0930 14:58:33.064153 4783 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.88:52912->38.129.56.88:36147: write tcp 38.129.56.88:52912->38.129.56.88:36147: write: broken pipe Sep 30 14:58:33 crc kubenswrapper[4783]: I0930 14:58:33.659998 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 30 14:58:33 crc kubenswrapper[4783]: I0930 14:58:33.660040 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 30 14:58:34 crc kubenswrapper[4783]: I0930 14:58:34.877397 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:34 crc kubenswrapper[4783]: I0930 14:58:34.877794 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:34 crc kubenswrapper[4783]: I0930 14:58:34.984083 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 30 14:58:35 crc kubenswrapper[4783]: I0930 14:58:35.710363 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 30 14:58:35 crc kubenswrapper[4783]: I0930 14:58:35.752938 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 30 14:58:36 crc kubenswrapper[4783]: I0930 14:58:36.934079 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:36 crc kubenswrapper[4783]: I0930 14:58:36.976630 4783 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 30 14:58:54 crc kubenswrapper[4783]: E0930 14:58:54.152171 4783 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59b335c6_05a6_4996_8412_8d06804cb213.slice/crio-019ac923c775f4697cdbaa20ddd0d7b81a2db92605def2c68a3c026a83ea61b6.scope\": RecentStats: unable to find data in memory cache]" Sep 30 14:58:54 crc kubenswrapper[4783]: I0930 14:58:54.605033 4783 generic.go:334] "Generic (PLEG): container finished" podID="59b335c6-05a6-4996-8412-8d06804cb213" containerID="019ac923c775f4697cdbaa20ddd0d7b81a2db92605def2c68a3c026a83ea61b6" exitCode=0 Sep 30 14:58:54 crc kubenswrapper[4783]: I0930 14:58:54.605184 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"59b335c6-05a6-4996-8412-8d06804cb213","Type":"ContainerDied","Data":"019ac923c775f4697cdbaa20ddd0d7b81a2db92605def2c68a3c026a83ea61b6"} Sep 30 14:58:54 crc kubenswrapper[4783]: I0930 14:58:54.607694 4783 generic.go:334] "Generic (PLEG): container finished" podID="4ebf1c70-b736-461b-9f07-449542b8c622" containerID="6530636e6ee9bb003af77089ce7690fa85e5bdd8573d65c96bae4f1c38384aac" exitCode=0 Sep 30 14:58:54 crc kubenswrapper[4783]: I0930 14:58:54.607756 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4ebf1c70-b736-461b-9f07-449542b8c622","Type":"ContainerDied","Data":"6530636e6ee9bb003af77089ce7690fa85e5bdd8573d65c96bae4f1c38384aac"} Sep 30 14:58:55 crc kubenswrapper[4783]: I0930 14:58:55.617345 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4ebf1c70-b736-461b-9f07-449542b8c622","Type":"ContainerStarted","Data":"3c2f2cc9723be8b8a1175e15db9faa5204a022c4d1d041f774a6b531b7dfd100"} Sep 30 14:58:55 crc kubenswrapper[4783]: I0930 14:58:55.617543 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 30 14:58:55 crc kubenswrapper[4783]: I0930 14:58:55.620318 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"59b335c6-05a6-4996-8412-8d06804cb213","Type":"ContainerStarted","Data":"fde3854c29c6242ca34c1b3b7778177daf71b171f155897f7f8cc36b78d4e12a"} Sep 30 14:58:55 crc kubenswrapper[4783]: I0930 14:58:55.620558 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:58:55 crc kubenswrapper[4783]: I0930 14:58:55.652259 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=35.652207952 podStartE2EDuration="35.652207952s" podCreationTimestamp="2025-09-30 14:58:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 14:58:55.646668915 +0000 UTC m=+5035.578135242" watchObservedRunningTime="2025-09-30 14:58:55.652207952 +0000 UTC m=+5035.583674259" Sep 30 14:58:55 crc kubenswrapper[4783]: I0930 14:58:55.677509 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=34.677483571 podStartE2EDuration="34.677483571s" podCreationTimestamp="2025-09-30 14:58:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 
14:58:55.672322656 +0000 UTC m=+5035.603788983" watchObservedRunningTime="2025-09-30 14:58:55.677483571 +0000 UTC m=+5035.608949868" Sep 30 14:59:12 crc kubenswrapper[4783]: I0930 14:59:12.260427 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 14:59:12 crc kubenswrapper[4783]: I0930 14:59:12.824413 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.355585 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6885566dd9-qx49q"] Sep 30 14:59:16 crc kubenswrapper[4783]: E0930 14:59:16.356448 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="070a5d7e-873a-4749-bba6-76f3d406b0a2" containerName="dnsmasq-dns" Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.356466 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="070a5d7e-873a-4749-bba6-76f3d406b0a2" containerName="dnsmasq-dns" Sep 30 14:59:16 crc kubenswrapper[4783]: E0930 14:59:16.356489 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="070a5d7e-873a-4749-bba6-76f3d406b0a2" containerName="init" Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.356498 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="070a5d7e-873a-4749-bba6-76f3d406b0a2" containerName="init" Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.356648 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="070a5d7e-873a-4749-bba6-76f3d406b0a2" containerName="dnsmasq-dns" Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.357532 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.368014 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6885566dd9-qx49q"] Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.520578 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c0c4f74-bced-436a-b552-580e064c6d70-config\") pod \"dnsmasq-dns-6885566dd9-qx49q\" (UID: \"6c0c4f74-bced-436a-b552-580e064c6d70\") " pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.520796 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f25mm\" (UniqueName: \"kubernetes.io/projected/6c0c4f74-bced-436a-b552-580e064c6d70-kube-api-access-f25mm\") pod \"dnsmasq-dns-6885566dd9-qx49q\" (UID: \"6c0c4f74-bced-436a-b552-580e064c6d70\") " pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.520879 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c0c4f74-bced-436a-b552-580e064c6d70-dns-svc\") pod \"dnsmasq-dns-6885566dd9-qx49q\" (UID: \"6c0c4f74-bced-436a-b552-580e064c6d70\") " pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.622318 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c0c4f74-bced-436a-b552-580e064c6d70-config\") pod \"dnsmasq-dns-6885566dd9-qx49q\" (UID: \"6c0c4f74-bced-436a-b552-580e064c6d70\") " pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 14:59:16 crc 
kubenswrapper[4783]: I0930 14:59:16.622444 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f25mm\" (UniqueName: \"kubernetes.io/projected/6c0c4f74-bced-436a-b552-580e064c6d70-kube-api-access-f25mm\") pod \"dnsmasq-dns-6885566dd9-qx49q\" (UID: \"6c0c4f74-bced-436a-b552-580e064c6d70\") " pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.622481 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c0c4f74-bced-436a-b552-580e064c6d70-dns-svc\") pod \"dnsmasq-dns-6885566dd9-qx49q\" (UID: \"6c0c4f74-bced-436a-b552-580e064c6d70\") " pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.623704 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c0c4f74-bced-436a-b552-580e064c6d70-dns-svc\") pod \"dnsmasq-dns-6885566dd9-qx49q\" (UID: \"6c0c4f74-bced-436a-b552-580e064c6d70\") " pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.623704 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c0c4f74-bced-436a-b552-580e064c6d70-config\") pod \"dnsmasq-dns-6885566dd9-qx49q\" (UID: \"6c0c4f74-bced-436a-b552-580e064c6d70\") " pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.642244 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f25mm\" (UniqueName: \"kubernetes.io/projected/6c0c4f74-bced-436a-b552-580e064c6d70-kube-api-access-f25mm\") pod \"dnsmasq-dns-6885566dd9-qx49q\" (UID: \"6c0c4f74-bced-436a-b552-580e064c6d70\") " pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 14:59:16 crc kubenswrapper[4783]: I0930 14:59:16.676534 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 14:59:17 crc kubenswrapper[4783]: I0930 14:59:17.028158 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 14:59:17 crc kubenswrapper[4783]: I0930 14:59:17.115710 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6885566dd9-qx49q"] Sep 30 14:59:17 crc kubenswrapper[4783]: W0930 14:59:17.119551 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c0c4f74_bced_436a_b552_580e064c6d70.slice/crio-c98a968c7d5ccac70b55aa886d492ade94fce2e1b482d778506e919e1970b057 WatchSource:0}: Error finding container c98a968c7d5ccac70b55aa886d492ade94fce2e1b482d778506e919e1970b057: Status 404 returned error can't find the container with id c98a968c7d5ccac70b55aa886d492ade94fce2e1b482d778506e919e1970b057 Sep 30 14:59:17 crc kubenswrapper[4783]: I0930 14:59:17.694215 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 14:59:17 crc kubenswrapper[4783]: I0930 14:59:17.796810 4783 generic.go:334] "Generic (PLEG): container finished" podID="6c0c4f74-bced-436a-b552-580e064c6d70" containerID="5bcf7625dc9e90f263d2c6a227eb1e708333e923471df314f1c2b6bf44db6815" exitCode=0 Sep 30 14:59:17 crc kubenswrapper[4783]: I0930 14:59:17.796864 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6885566dd9-qx49q" event={"ID":"6c0c4f74-bced-436a-b552-580e064c6d70","Type":"ContainerDied","Data":"5bcf7625dc9e90f263d2c6a227eb1e708333e923471df314f1c2b6bf44db6815"} Sep 30 14:59:17 crc kubenswrapper[4783]: I0930 14:59:17.796895 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6885566dd9-qx49q" event={"ID":"6c0c4f74-bced-436a-b552-580e064c6d70","Type":"ContainerStarted","Data":"c98a968c7d5ccac70b55aa886d492ade94fce2e1b482d778506e919e1970b057"} Sep 30 14:59:18 crc kubenswrapper[4783]: I0930 14:59:18.808507 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6885566dd9-qx49q" event={"ID":"6c0c4f74-bced-436a-b552-580e064c6d70","Type":"ContainerStarted","Data":"1f47432459fe7ca9fafeb906f2809df372acab3eef117d060463f6dbafd28e98"} Sep 30 14:59:18 crc kubenswrapper[4783]: I0930 14:59:18.808861 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 14:59:18 crc kubenswrapper[4783]: I0930 14:59:18.826931 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6885566dd9-qx49q" podStartSLOduration=2.826910421 podStartE2EDuration="2.826910421s" podCreationTimestamp="2025-09-30 14:59:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 14:59:18.824573426 +0000 UTC m=+5058.756039733" watchObservedRunningTime="2025-09-30 14:59:18.826910421 +0000 UTC m=+5058.758376728" Sep 30 14:59:21 crc kubenswrapper[4783]: I0930 14:59:21.041718 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="4ebf1c70-b736-461b-9f07-449542b8c622" containerName="rabbitmq" containerID="cri-o://3c2f2cc9723be8b8a1175e15db9faa5204a022c4d1d041f774a6b531b7dfd100" gracePeriod=604796 Sep 30 14:59:21 crc kubenswrapper[4783]: I0930 14:59:21.762372 4783 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/rabbitmq-cell1-server-0" podUID="59b335c6-05a6-4996-8412-8d06804cb213" containerName="rabbitmq" containerID="cri-o://fde3854c29c6242ca34c1b3b7778177daf71b171f155897f7f8cc36b78d4e12a" gracePeriod=604796 Sep 30 14:59:22 crc kubenswrapper[4783]: I0930 14:59:22.256385 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="4ebf1c70-b736-461b-9f07-449542b8c622" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.242:5671: connect: connection refused" Sep 30 14:59:22 crc kubenswrapper[4783]: I0930 14:59:22.822692 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="59b335c6-05a6-4996-8412-8d06804cb213" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.243:5671: connect: connection refused" Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.036180 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xxcsh"] Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.044218 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xxcsh" Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.066977 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xxcsh"] Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.125504 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dtdj\" (UniqueName: \"kubernetes.io/projected/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-kube-api-access-5dtdj\") pod \"redhat-marketplace-xxcsh\" (UID: \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\") " pod="openshift-marketplace/redhat-marketplace-xxcsh" Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.125766 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-catalog-content\") pod \"redhat-marketplace-xxcsh\" (UID: \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\") " pod="openshift-marketplace/redhat-marketplace-xxcsh" Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.125870 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-utilities\") pod \"redhat-marketplace-xxcsh\" (UID: \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\") " pod="openshift-marketplace/redhat-marketplace-xxcsh" Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.226380 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dtdj\" (UniqueName: \"kubernetes.io/projected/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-kube-api-access-5dtdj\") pod \"redhat-marketplace-xxcsh\" (UID: \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\") " pod="openshift-marketplace/redhat-marketplace-xxcsh" Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.226733 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-utilities\") pod \"redhat-marketplace-xxcsh\" (UID: \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\") " pod="openshift-marketplace/redhat-marketplace-xxcsh" Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.226866 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-catalog-content\") pod \"redhat-marketplace-xxcsh\" (UID: \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\") " pod="openshift-marketplace/redhat-marketplace-xxcsh" Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.227270 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-utilities\") pod \"redhat-marketplace-xxcsh\" (UID: \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\") " pod="openshift-marketplace/redhat-marketplace-xxcsh" Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.227563 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-catalog-content\") pod \"redhat-marketplace-xxcsh\" (UID: \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\") " pod="openshift-marketplace/redhat-marketplace-xxcsh" Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.250728 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dtdj\" (UniqueName: \"kubernetes.io/projected/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-kube-api-access-5dtdj\") pod \"redhat-marketplace-xxcsh\" (UID: \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\") " pod="openshift-marketplace/redhat-marketplace-xxcsh" Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.370818 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xxcsh" Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.597579 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xxcsh"] Sep 30 14:59:25 crc kubenswrapper[4783]: W0930 14:59:25.604447 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ec8eb44_7d5c_474e_a14b_e5fbae2f17a4.slice/crio-285b009d3379ea58f4571feb496aa208b48e95fca57bff69297b17e375288212 WatchSource:0}: Error finding container 285b009d3379ea58f4571feb496aa208b48e95fca57bff69297b17e375288212: Status 404 returned error can't find the container with id 285b009d3379ea58f4571feb496aa208b48e95fca57bff69297b17e375288212 Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.868278 4783 generic.go:334] "Generic (PLEG): container finished" podID="3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" containerID="3f6f897a057bd5c66a16cf1d94b51e613831531ce94251d4f54cd203dc25601e" exitCode=0 Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.868323 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xxcsh" event={"ID":"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4","Type":"ContainerDied","Data":"3f6f897a057bd5c66a16cf1d94b51e613831531ce94251d4f54cd203dc25601e"} Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.868351 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xxcsh" event={"ID":"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4","Type":"ContainerStarted","Data":"285b009d3379ea58f4571feb496aa208b48e95fca57bff69297b17e375288212"} Sep 30 14:59:25 crc kubenswrapper[4783]: I0930 14:59:25.870179 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 14:59:26 crc kubenswrapper[4783]: I0930 14:59:26.678950 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 14:59:26 crc kubenswrapper[4783]: I0930 14:59:26.729253 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f455d6d69-qzwk6"] Sep 30 14:59:26 crc kubenswrapper[4783]: I0930 14:59:26.729532 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6" podUID="ed77d292-efda-4cef-9290-e24de945ed95" containerName="dnsmasq-dns" containerID="cri-o://362b355a9e4656ee6b8985689d034ebc462c4d7bc90ca51007367b4b344f9123" gracePeriod=10 Sep 30 14:59:26 crc kubenswrapper[4783]: I0930 14:59:26.877789 4783 generic.go:334] "Generic (PLEG): container finished" podID="ed77d292-efda-4cef-9290-e24de945ed95" containerID="362b355a9e4656ee6b8985689d034ebc462c4d7bc90ca51007367b4b344f9123" exitCode=0 Sep 30 14:59:26 crc kubenswrapper[4783]: I0930 14:59:26.877851 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6" event={"ID":"ed77d292-efda-4cef-9290-e24de945ed95","Type":"ContainerDied","Data":"362b355a9e4656ee6b8985689d034ebc462c4d7bc90ca51007367b4b344f9123"} Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.386084 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.560037 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrdhg\" (UniqueName: \"kubernetes.io/projected/ed77d292-efda-4cef-9290-e24de945ed95-kube-api-access-mrdhg\") pod \"ed77d292-efda-4cef-9290-e24de945ed95\" (UID: \"ed77d292-efda-4cef-9290-e24de945ed95\") " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.560162 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed77d292-efda-4cef-9290-e24de945ed95-config\") pod \"ed77d292-efda-4cef-9290-e24de945ed95\" (UID: \"ed77d292-efda-4cef-9290-e24de945ed95\") " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.560200 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed77d292-efda-4cef-9290-e24de945ed95-dns-svc\") pod \"ed77d292-efda-4cef-9290-e24de945ed95\" (UID: \"ed77d292-efda-4cef-9290-e24de945ed95\") " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.571278 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed77d292-efda-4cef-9290-e24de945ed95-kube-api-access-mrdhg" (OuterVolumeSpecName: "kube-api-access-mrdhg") pod "ed77d292-efda-4cef-9290-e24de945ed95" (UID: "ed77d292-efda-4cef-9290-e24de945ed95"). InnerVolumeSpecName "kube-api-access-mrdhg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.602342 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed77d292-efda-4cef-9290-e24de945ed95-config" (OuterVolumeSpecName: "config") pod "ed77d292-efda-4cef-9290-e24de945ed95" (UID: "ed77d292-efda-4cef-9290-e24de945ed95"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.608745 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed77d292-efda-4cef-9290-e24de945ed95-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ed77d292-efda-4cef-9290-e24de945ed95" (UID: "ed77d292-efda-4cef-9290-e24de945ed95"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.655266 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.662015 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed77d292-efda-4cef-9290-e24de945ed95-config\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.662057 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed77d292-efda-4cef-9290-e24de945ed95-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.662068 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrdhg\" (UniqueName: \"kubernetes.io/projected/ed77d292-efda-4cef-9290-e24de945ed95-kube-api-access-mrdhg\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.762550 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-plugins\") pod \"4ebf1c70-b736-461b-9f07-449542b8c622\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.762602 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-confd\") pod \"4ebf1c70-b736-461b-9f07-449542b8c622\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.762689 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4ebf1c70-b736-461b-9f07-449542b8c622-erlang-cookie-secret\") pod \"4ebf1c70-b736-461b-9f07-449542b8c622\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.762730 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-erlang-cookie\") pod \"4ebf1c70-b736-461b-9f07-449542b8c622\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.762869 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4ebf1c70-b736-461b-9f07-449542b8c622-pod-info\") pod \"4ebf1c70-b736-461b-9f07-449542b8c622\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.763055 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\") pod \"4ebf1c70-b736-461b-9f07-449542b8c622\" 
(UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.763084 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-config-data\") pod \"4ebf1c70-b736-461b-9f07-449542b8c622\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.763118 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-tls\") pod \"4ebf1c70-b736-461b-9f07-449542b8c622\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.763171 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-server-conf\") pod \"4ebf1c70-b736-461b-9f07-449542b8c622\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.763215 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rr5px\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-kube-api-access-rr5px\") pod \"4ebf1c70-b736-461b-9f07-449542b8c622\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.763261 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-plugins-conf\") pod \"4ebf1c70-b736-461b-9f07-449542b8c622\" (UID: \"4ebf1c70-b736-461b-9f07-449542b8c622\") " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.763901 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "4ebf1c70-b736-461b-9f07-449542b8c622" (UID: "4ebf1c70-b736-461b-9f07-449542b8c622"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.764033 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "4ebf1c70-b736-461b-9f07-449542b8c622" (UID: "4ebf1c70-b736-461b-9f07-449542b8c622"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.766363 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "4ebf1c70-b736-461b-9f07-449542b8c622" (UID: "4ebf1c70-b736-461b-9f07-449542b8c622"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.766458 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ebf1c70-b736-461b-9f07-449542b8c622-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "4ebf1c70-b736-461b-9f07-449542b8c622" (UID: "4ebf1c70-b736-461b-9f07-449542b8c622"). 
InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.767055 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/4ebf1c70-b736-461b-9f07-449542b8c622-pod-info" (OuterVolumeSpecName: "pod-info") pod "4ebf1c70-b736-461b-9f07-449542b8c622" (UID: "4ebf1c70-b736-461b-9f07-449542b8c622"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.770083 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "4ebf1c70-b736-461b-9f07-449542b8c622" (UID: "4ebf1c70-b736-461b-9f07-449542b8c622"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.770896 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-kube-api-access-rr5px" (OuterVolumeSpecName: "kube-api-access-rr5px") pod "4ebf1c70-b736-461b-9f07-449542b8c622" (UID: "4ebf1c70-b736-461b-9f07-449542b8c622"). InnerVolumeSpecName "kube-api-access-rr5px". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.782037 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5" (OuterVolumeSpecName: "persistence") pod "4ebf1c70-b736-461b-9f07-449542b8c622" (UID: "4ebf1c70-b736-461b-9f07-449542b8c622"). InnerVolumeSpecName "pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.785006 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-config-data" (OuterVolumeSpecName: "config-data") pod "4ebf1c70-b736-461b-9f07-449542b8c622" (UID: "4ebf1c70-b736-461b-9f07-449542b8c622"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.813803 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-server-conf" (OuterVolumeSpecName: "server-conf") pod "4ebf1c70-b736-461b-9f07-449542b8c622" (UID: "4ebf1c70-b736-461b-9f07-449542b8c622"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.849979 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "4ebf1c70-b736-461b-9f07-449542b8c622" (UID: "4ebf1c70-b736-461b-9f07-449542b8c622"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.867403 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.867847 4783 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4ebf1c70-b736-461b-9f07-449542b8c622-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.867930 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.867996 4783 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4ebf1c70-b736-461b-9f07-449542b8c622-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.868142 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\") on node \"crc\" " Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.869619 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.869709 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.869734 4783 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.869759 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rr5px\" (UniqueName: \"kubernetes.io/projected/4ebf1c70-b736-461b-9f07-449542b8c622-kube-api-access-rr5px\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.869799 4783 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4ebf1c70-b736-461b-9f07-449542b8c622-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.869812 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4ebf1c70-b736-461b-9f07-449542b8c622-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.888387 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.888482 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4ebf1c70-b736-461b-9f07-449542b8c622","Type":"ContainerDied","Data":"3c2f2cc9723be8b8a1175e15db9faa5204a022c4d1d041f774a6b531b7dfd100"} Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.888534 4783 scope.go:117] "RemoveContainer" containerID="3c2f2cc9723be8b8a1175e15db9faa5204a022c4d1d041f774a6b531b7dfd100" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.888973 4783 generic.go:334] "Generic (PLEG): container finished" podID="4ebf1c70-b736-461b-9f07-449542b8c622" containerID="3c2f2cc9723be8b8a1175e15db9faa5204a022c4d1d041f774a6b531b7dfd100" exitCode=0 Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.889035 4783 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.889144 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4ebf1c70-b736-461b-9f07-449542b8c622","Type":"ContainerDied","Data":"1173d623ca7ff5d2b08a5dc1f35ac4c24a9d7ef6b38cb40b7d4009a05526b3e5"} Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.889422 4783 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5") on node "crc" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.893301 4783 generic.go:334] "Generic (PLEG): container finished" podID="3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" containerID="34729c004557f934c3a46177d6ad53b73ee73d17229e3e1e0c6bba0bd7ed3c2e" exitCode=0 Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.893428 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xxcsh" event={"ID":"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4","Type":"ContainerDied","Data":"34729c004557f934c3a46177d6ad53b73ee73d17229e3e1e0c6bba0bd7ed3c2e"} Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.899244 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6" event={"ID":"ed77d292-efda-4cef-9290-e24de945ed95","Type":"ContainerDied","Data":"d0e6a48b57e8f905556a76f4e9ab54888cc9bd959f601be3c63f220e861870e1"} Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.899303 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f455d6d69-qzwk6" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.940727 4783 scope.go:117] "RemoveContainer" containerID="6530636e6ee9bb003af77089ce7690fa85e5bdd8573d65c96bae4f1c38384aac" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.969548 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.972611 4783 reconciler_common.go:293] "Volume detached for volume \"pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.979312 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.983446 4783 scope.go:117] "RemoveContainer" containerID="3c2f2cc9723be8b8a1175e15db9faa5204a022c4d1d041f774a6b531b7dfd100" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.991549 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f455d6d69-qzwk6"] Sep 30 14:59:27 crc kubenswrapper[4783]: E0930 14:59:27.992834 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c2f2cc9723be8b8a1175e15db9faa5204a022c4d1d041f774a6b531b7dfd100\": container with ID starting with 3c2f2cc9723be8b8a1175e15db9faa5204a022c4d1d041f774a6b531b7dfd100 not found: ID does not exist" containerID="3c2f2cc9723be8b8a1175e15db9faa5204a022c4d1d041f774a6b531b7dfd100" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.992873 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c2f2cc9723be8b8a1175e15db9faa5204a022c4d1d041f774a6b531b7dfd100"} err="failed to get container status \"3c2f2cc9723be8b8a1175e15db9faa5204a022c4d1d041f774a6b531b7dfd100\": rpc error: code = NotFound desc = could not find container \"3c2f2cc9723be8b8a1175e15db9faa5204a022c4d1d041f774a6b531b7dfd100\": container with ID starting with 3c2f2cc9723be8b8a1175e15db9faa5204a022c4d1d041f774a6b531b7dfd100 not found: ID does not exist" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.992903 4783 scope.go:117] "RemoveContainer" containerID="6530636e6ee9bb003af77089ce7690fa85e5bdd8573d65c96bae4f1c38384aac" Sep 30 14:59:27 crc kubenswrapper[4783]: E0930 14:59:27.993271 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6530636e6ee9bb003af77089ce7690fa85e5bdd8573d65c96bae4f1c38384aac\": container with ID starting with 6530636e6ee9bb003af77089ce7690fa85e5bdd8573d65c96bae4f1c38384aac not found: ID does not exist" containerID="6530636e6ee9bb003af77089ce7690fa85e5bdd8573d65c96bae4f1c38384aac" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 14:59:27.993302 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6530636e6ee9bb003af77089ce7690fa85e5bdd8573d65c96bae4f1c38384aac"} err="failed to get container status \"6530636e6ee9bb003af77089ce7690fa85e5bdd8573d65c96bae4f1c38384aac\": rpc error: code = NotFound desc = could not find container \"6530636e6ee9bb003af77089ce7690fa85e5bdd8573d65c96bae4f1c38384aac\": container with ID starting with 6530636e6ee9bb003af77089ce7690fa85e5bdd8573d65c96bae4f1c38384aac not found: ID does not exist" Sep 30 14:59:27 crc kubenswrapper[4783]: I0930 
14:59:27.993323 4783 scope.go:117] "RemoveContainer" containerID="362b355a9e4656ee6b8985689d034ebc462c4d7bc90ca51007367b4b344f9123" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.003330 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f455d6d69-qzwk6"] Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.022594 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 14:59:28 crc kubenswrapper[4783]: E0930 14:59:28.023008 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed77d292-efda-4cef-9290-e24de945ed95" containerName="init" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.023028 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed77d292-efda-4cef-9290-e24de945ed95" containerName="init" Sep 30 14:59:28 crc kubenswrapper[4783]: E0930 14:59:28.023053 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ebf1c70-b736-461b-9f07-449542b8c622" containerName="rabbitmq" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.023060 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ebf1c70-b736-461b-9f07-449542b8c622" containerName="rabbitmq" Sep 30 14:59:28 crc kubenswrapper[4783]: E0930 14:59:28.023074 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ebf1c70-b736-461b-9f07-449542b8c622" containerName="setup-container" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.023082 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ebf1c70-b736-461b-9f07-449542b8c622" containerName="setup-container" Sep 30 14:59:28 crc kubenswrapper[4783]: E0930 14:59:28.023100 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed77d292-efda-4cef-9290-e24de945ed95" containerName="dnsmasq-dns" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.023107 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed77d292-efda-4cef-9290-e24de945ed95" containerName="dnsmasq-dns" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.023326 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ebf1c70-b736-461b-9f07-449542b8c622" containerName="rabbitmq" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.023343 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed77d292-efda-4cef-9290-e24de945ed95" containerName="dnsmasq-dns" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.024558 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.028345 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.028615 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.028751 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.028354 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.028993 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.029604 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.029758 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-wdvqz" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.032883 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.055755 4783 scope.go:117] "RemoveContainer" containerID="1c6938fb7583aaf473f86ee780000367c78a67df1e1ae59174882fb29ffe3b6a" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.176731 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ed3bb681-a6c6-4336-8664-36e153896c36-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.176825 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.176863 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ed3bb681-a6c6-4336-8664-36e153896c36-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.176910 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ed3bb681-a6c6-4336-8664-36e153896c36-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.176965 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ed3bb681-a6c6-4336-8664-36e153896c36-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 
14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.177003 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ed3bb681-a6c6-4336-8664-36e153896c36-config-data\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.177036 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ed3bb681-a6c6-4336-8664-36e153896c36-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.177069 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7shc\" (UniqueName: \"kubernetes.io/projected/ed3bb681-a6c6-4336-8664-36e153896c36-kube-api-access-k7shc\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.177098 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ed3bb681-a6c6-4336-8664-36e153896c36-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.177199 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ed3bb681-a6c6-4336-8664-36e153896c36-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.177258 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ed3bb681-a6c6-4336-8664-36e153896c36-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.278776 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ed3bb681-a6c6-4336-8664-36e153896c36-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.278818 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ed3bb681-a6c6-4336-8664-36e153896c36-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.278856 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ed3bb681-a6c6-4336-8664-36e153896c36-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.278884 4783 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.278905 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ed3bb681-a6c6-4336-8664-36e153896c36-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.278934 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ed3bb681-a6c6-4336-8664-36e153896c36-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.278968 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ed3bb681-a6c6-4336-8664-36e153896c36-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.278994 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ed3bb681-a6c6-4336-8664-36e153896c36-config-data\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.279014 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ed3bb681-a6c6-4336-8664-36e153896c36-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.279033 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7shc\" (UniqueName: \"kubernetes.io/projected/ed3bb681-a6c6-4336-8664-36e153896c36-kube-api-access-k7shc\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.279049 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ed3bb681-a6c6-4336-8664-36e153896c36-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.280306 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ed3bb681-a6c6-4336-8664-36e153896c36-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.280870 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ed3bb681-a6c6-4336-8664-36e153896c36-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: 
\"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.281185 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ed3bb681-a6c6-4336-8664-36e153896c36-config-data\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.281464 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ed3bb681-a6c6-4336-8664-36e153896c36-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.284199 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ed3bb681-a6c6-4336-8664-36e153896c36-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.284288 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ed3bb681-a6c6-4336-8664-36e153896c36-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.284648 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ed3bb681-a6c6-4336-8664-36e153896c36-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.284898 4783 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.284938 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6e27c1000c8130b4873ed2092beb58938e245e536766a028cc255f01ae931b18/globalmount\"" pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.287166 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ed3bb681-a6c6-4336-8664-36e153896c36-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.289596 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ed3bb681-a6c6-4336-8664-36e153896c36-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.299824 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7shc\" (UniqueName: \"kubernetes.io/projected/ed3bb681-a6c6-4336-8664-36e153896c36-kube-api-access-k7shc\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.325255 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c271de1c-3c9b-4270-9ed8-3deccb2595e5\") pod \"rabbitmq-server-0\" (UID: \"ed3bb681-a6c6-4336-8664-36e153896c36\") " pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.471544 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.521678 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.599813 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/59b335c6-05a6-4996-8412-8d06804cb213-pod-info\") pod \"59b335c6-05a6-4996-8412-8d06804cb213\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.599949 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-confd\") pod \"59b335c6-05a6-4996-8412-8d06804cb213\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.600032 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-tls\") pod \"59b335c6-05a6-4996-8412-8d06804cb213\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.600073 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-config-data\") pod \"59b335c6-05a6-4996-8412-8d06804cb213\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.600317 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\") pod \"59b335c6-05a6-4996-8412-8d06804cb213\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.600349 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qh2hx\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-kube-api-access-qh2hx\") pod \"59b335c6-05a6-4996-8412-8d06804cb213\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.600378 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-plugins-conf\") pod \"59b335c6-05a6-4996-8412-8d06804cb213\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.600417 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-plugins\") pod \"59b335c6-05a6-4996-8412-8d06804cb213\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.600458 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-server-conf\") pod \"59b335c6-05a6-4996-8412-8d06804cb213\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.600486 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/59b335c6-05a6-4996-8412-8d06804cb213-erlang-cookie-secret\") pod 
\"59b335c6-05a6-4996-8412-8d06804cb213\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.600507 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-erlang-cookie\") pod \"59b335c6-05a6-4996-8412-8d06804cb213\" (UID: \"59b335c6-05a6-4996-8412-8d06804cb213\") " Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.601389 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "59b335c6-05a6-4996-8412-8d06804cb213" (UID: "59b335c6-05a6-4996-8412-8d06804cb213"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.601676 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "59b335c6-05a6-4996-8412-8d06804cb213" (UID: "59b335c6-05a6-4996-8412-8d06804cb213"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.607645 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "59b335c6-05a6-4996-8412-8d06804cb213" (UID: "59b335c6-05a6-4996-8412-8d06804cb213"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.607699 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-kube-api-access-qh2hx" (OuterVolumeSpecName: "kube-api-access-qh2hx") pod "59b335c6-05a6-4996-8412-8d06804cb213" (UID: "59b335c6-05a6-4996-8412-8d06804cb213"). InnerVolumeSpecName "kube-api-access-qh2hx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.612003 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "59b335c6-05a6-4996-8412-8d06804cb213" (UID: "59b335c6-05a6-4996-8412-8d06804cb213"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.619566 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59b335c6-05a6-4996-8412-8d06804cb213-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "59b335c6-05a6-4996-8412-8d06804cb213" (UID: "59b335c6-05a6-4996-8412-8d06804cb213"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.619933 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975" (OuterVolumeSpecName: "persistence") pod "59b335c6-05a6-4996-8412-8d06804cb213" (UID: "59b335c6-05a6-4996-8412-8d06804cb213"). 
InnerVolumeSpecName "pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.620006 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/59b335c6-05a6-4996-8412-8d06804cb213-pod-info" (OuterVolumeSpecName: "pod-info") pod "59b335c6-05a6-4996-8412-8d06804cb213" (UID: "59b335c6-05a6-4996-8412-8d06804cb213"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.638242 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-config-data" (OuterVolumeSpecName: "config-data") pod "59b335c6-05a6-4996-8412-8d06804cb213" (UID: "59b335c6-05a6-4996-8412-8d06804cb213"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.659354 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-server-conf" (OuterVolumeSpecName: "server-conf") pod "59b335c6-05a6-4996-8412-8d06804cb213" (UID: "59b335c6-05a6-4996-8412-8d06804cb213"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.702302 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.702335 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.702368 4783 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\") on node \"crc\" " Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.702381 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qh2hx\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-kube-api-access-qh2hx\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.702391 4783 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.702402 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.702410 4783 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/59b335c6-05a6-4996-8412-8d06804cb213-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.702419 4783 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/59b335c6-05a6-4996-8412-8d06804cb213-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.702427 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.702435 4783 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/59b335c6-05a6-4996-8412-8d06804cb213-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.724284 4783 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.724861 4783 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975") on node "crc" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.737919 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "59b335c6-05a6-4996-8412-8d06804cb213" (UID: "59b335c6-05a6-4996-8412-8d06804cb213"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.803804 4783 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/59b335c6-05a6-4996-8412-8d06804cb213-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.803883 4783 reconciler_common.go:293] "Volume detached for volume \"pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.854680 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ebf1c70-b736-461b-9f07-449542b8c622" path="/var/lib/kubelet/pods/4ebf1c70-b736-461b-9f07-449542b8c622/volumes" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.855268 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed77d292-efda-4cef-9290-e24de945ed95" path="/var/lib/kubelet/pods/ed77d292-efda-4cef-9290-e24de945ed95/volumes" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.908881 4783 generic.go:334] "Generic (PLEG): container finished" podID="59b335c6-05a6-4996-8412-8d06804cb213" containerID="fde3854c29c6242ca34c1b3b7778177daf71b171f155897f7f8cc36b78d4e12a" exitCode=0 Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.908917 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"59b335c6-05a6-4996-8412-8d06804cb213","Type":"ContainerDied","Data":"fde3854c29c6242ca34c1b3b7778177daf71b171f155897f7f8cc36b78d4e12a"} Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.908960 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"59b335c6-05a6-4996-8412-8d06804cb213","Type":"ContainerDied","Data":"0fa8fd85d9edc181a24939e97ed582dac90fa570ead01a8cdb2b611bba938134"} Sep 30 
14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.908961 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.908979 4783 scope.go:117] "RemoveContainer" containerID="fde3854c29c6242ca34c1b3b7778177daf71b171f155897f7f8cc36b78d4e12a" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.956934 4783 scope.go:117] "RemoveContainer" containerID="019ac923c775f4697cdbaa20ddd0d7b81a2db92605def2c68a3c026a83ea61b6" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.973610 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 14:59:28 crc kubenswrapper[4783]: W0930 14:59:28.990845 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poded3bb681_a6c6_4336_8664_36e153896c36.slice/crio-281dc83ad667b71431b6ed226a0110afc443e03f4dae515a453187069a4698ee WatchSource:0}: Error finding container 281dc83ad667b71431b6ed226a0110afc443e03f4dae515a453187069a4698ee: Status 404 returned error can't find the container with id 281dc83ad667b71431b6ed226a0110afc443e03f4dae515a453187069a4698ee Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.992371 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.994064 4783 scope.go:117] "RemoveContainer" containerID="fde3854c29c6242ca34c1b3b7778177daf71b171f155897f7f8cc36b78d4e12a" Sep 30 14:59:28 crc kubenswrapper[4783]: E0930 14:59:28.994420 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fde3854c29c6242ca34c1b3b7778177daf71b171f155897f7f8cc36b78d4e12a\": container with ID starting with fde3854c29c6242ca34c1b3b7778177daf71b171f155897f7f8cc36b78d4e12a not found: ID does not exist" containerID="fde3854c29c6242ca34c1b3b7778177daf71b171f155897f7f8cc36b78d4e12a" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.994462 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fde3854c29c6242ca34c1b3b7778177daf71b171f155897f7f8cc36b78d4e12a"} err="failed to get container status \"fde3854c29c6242ca34c1b3b7778177daf71b171f155897f7f8cc36b78d4e12a\": rpc error: code = NotFound desc = could not find container \"fde3854c29c6242ca34c1b3b7778177daf71b171f155897f7f8cc36b78d4e12a\": container with ID starting with fde3854c29c6242ca34c1b3b7778177daf71b171f155897f7f8cc36b78d4e12a not found: ID does not exist" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.994491 4783 scope.go:117] "RemoveContainer" containerID="019ac923c775f4697cdbaa20ddd0d7b81a2db92605def2c68a3c026a83ea61b6" Sep 30 14:59:28 crc kubenswrapper[4783]: E0930 14:59:28.994686 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"019ac923c775f4697cdbaa20ddd0d7b81a2db92605def2c68a3c026a83ea61b6\": container with ID starting with 019ac923c775f4697cdbaa20ddd0d7b81a2db92605def2c68a3c026a83ea61b6 not found: ID does not exist" containerID="019ac923c775f4697cdbaa20ddd0d7b81a2db92605def2c68a3c026a83ea61b6" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.994712 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"019ac923c775f4697cdbaa20ddd0d7b81a2db92605def2c68a3c026a83ea61b6"} err="failed to get container status 
\"019ac923c775f4697cdbaa20ddd0d7b81a2db92605def2c68a3c026a83ea61b6\": rpc error: code = NotFound desc = could not find container \"019ac923c775f4697cdbaa20ddd0d7b81a2db92605def2c68a3c026a83ea61b6\": container with ID starting with 019ac923c775f4697cdbaa20ddd0d7b81a2db92605def2c68a3c026a83ea61b6 not found: ID does not exist" Sep 30 14:59:28 crc kubenswrapper[4783]: I0930 14:59:28.999868 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.013271 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 14:59:29 crc kubenswrapper[4783]: E0930 14:59:29.013636 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59b335c6-05a6-4996-8412-8d06804cb213" containerName="rabbitmq" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.013654 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="59b335c6-05a6-4996-8412-8d06804cb213" containerName="rabbitmq" Sep 30 14:59:29 crc kubenswrapper[4783]: E0930 14:59:29.013666 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59b335c6-05a6-4996-8412-8d06804cb213" containerName="setup-container" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.013673 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="59b335c6-05a6-4996-8412-8d06804cb213" containerName="setup-container" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.013844 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="59b335c6-05a6-4996-8412-8d06804cb213" containerName="rabbitmq" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.014842 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.020086 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.020388 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.020640 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-qc2cw" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.020698 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.020952 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.021021 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.020960 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.035845 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.208161 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " 
pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.208663 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.208734 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.208758 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.208791 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.208817 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.208845 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.208941 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.208996 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-st5nc\" (UniqueName: \"kubernetes.io/projected/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-kube-api-access-st5nc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.209047 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.209096 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.310606 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.310695 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-st5nc\" (UniqueName: \"kubernetes.io/projected/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-kube-api-access-st5nc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.310749 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.310789 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.310855 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.310883 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.310918 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.310940 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.310965 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.310993 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.311016 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.311536 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.311896 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.313311 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.313793 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.314678 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.317069 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.318097 4783 csi_attacher.go:380] kubernetes.io/csi: 
attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.318146 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2337f5b83f2849bc2e06ff73920d7ab842ea8dc6ef7d4167ca212a6758a9a8d4/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.318101 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.319527 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.325644 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.330867 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-st5nc\" (UniqueName: \"kubernetes.io/projected/e175dc5c-270b-46c8-b5cb-d95fab2b9a92-kube-api-access-st5nc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.363520 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-193bd3dd-3fc5-4832-bf0b-d57755ce9975\") pod \"rabbitmq-cell1-server-0\" (UID: \"e175dc5c-270b-46c8-b5cb-d95fab2b9a92\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.430877 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.854707 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 30 14:59:29 crc kubenswrapper[4783]: W0930 14:59:29.865432 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode175dc5c_270b_46c8_b5cb_d95fab2b9a92.slice/crio-dcd1cfca5a6bae7296eab62c131c6c3dbf85e1ccce6fc9887ef2432a576ea876 WatchSource:0}: Error finding container dcd1cfca5a6bae7296eab62c131c6c3dbf85e1ccce6fc9887ef2432a576ea876: Status 404 returned error can't find the container with id dcd1cfca5a6bae7296eab62c131c6c3dbf85e1ccce6fc9887ef2432a576ea876
Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.918317 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e175dc5c-270b-46c8-b5cb-d95fab2b9a92","Type":"ContainerStarted","Data":"dcd1cfca5a6bae7296eab62c131c6c3dbf85e1ccce6fc9887ef2432a576ea876"}
Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.921417 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xxcsh" event={"ID":"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4","Type":"ContainerStarted","Data":"f587ba04de2d12f44d2e51614b889525abde440abfdb4c95f34baaa2ec9c405f"}
Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.922459 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ed3bb681-a6c6-4336-8664-36e153896c36","Type":"ContainerStarted","Data":"0b55487d1d21768803c62df09cff2931d664e93c74ab169afb9ac722745b81eb"}
Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.922486 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ed3bb681-a6c6-4336-8664-36e153896c36","Type":"ContainerStarted","Data":"281dc83ad667b71431b6ed226a0110afc443e03f4dae515a453187069a4698ee"}
Sep 30 14:59:29 crc kubenswrapper[4783]: I0930 14:59:29.940851 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xxcsh" podStartSLOduration=1.945250041 podStartE2EDuration="4.940830979s" podCreationTimestamp="2025-09-30 14:59:25 +0000 UTC" firstStartedPulling="2025-09-30 14:59:25.869915483 +0000 UTC m=+5065.801381790" lastFinishedPulling="2025-09-30 14:59:28.865496431 +0000 UTC m=+5068.796962728" observedRunningTime="2025-09-30 14:59:29.93677384 +0000 UTC m=+5069.868240157" watchObservedRunningTime="2025-09-30 14:59:29.940830979 +0000 UTC m=+5069.872297286"
Sep 30 14:59:30 crc kubenswrapper[4783]: I0930 14:59:30.851551 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59b335c6-05a6-4996-8412-8d06804cb213" path="/var/lib/kubelet/pods/59b335c6-05a6-4996-8412-8d06804cb213/volumes"
Sep 30 14:59:30 crc kubenswrapper[4783]: I0930 14:59:30.929552 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e175dc5c-270b-46c8-b5cb-d95fab2b9a92","Type":"ContainerStarted","Data":"9e7b77b1e77034a54fad9a41bbfcf42647c8315c832f0bbb68a1ddd88624af04"}
Sep 30 14:59:35 crc kubenswrapper[4783]: I0930 14:59:35.371733 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xxcsh"
Sep 30 14:59:35 crc kubenswrapper[4783]: I0930 14:59:35.373059 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xxcsh"
Sep 30 14:59:35 crc kubenswrapper[4783]: I0930 14:59:35.428297 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xxcsh"
Sep 30 14:59:36 crc kubenswrapper[4783]: I0930 14:59:36.023057 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xxcsh"
Sep 30 14:59:36 crc kubenswrapper[4783]: I0930 14:59:36.086985 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xxcsh"]
Sep 30 14:59:37 crc kubenswrapper[4783]: I0930 14:59:37.674325 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 14:59:37 crc kubenswrapper[4783]: I0930 14:59:37.674403 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 14:59:37 crc kubenswrapper[4783]: I0930 14:59:37.991029 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xxcsh" podUID="3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" containerName="registry-server" containerID="cri-o://f587ba04de2d12f44d2e51614b889525abde440abfdb4c95f34baaa2ec9c405f" gracePeriod=2
Sep 30 14:59:38 crc kubenswrapper[4783]: I0930 14:59:38.353197 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xxcsh"
Sep 30 14:59:38 crc kubenswrapper[4783]: I0930 14:59:38.455888 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-utilities\") pod \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\" (UID: \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\") "
Sep 30 14:59:38 crc kubenswrapper[4783]: I0930 14:59:38.456037 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-catalog-content\") pod \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\" (UID: \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\") "
Sep 30 14:59:38 crc kubenswrapper[4783]: I0930 14:59:38.456148 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5dtdj\" (UniqueName: \"kubernetes.io/projected/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-kube-api-access-5dtdj\") pod \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\" (UID: \"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4\") "
Sep 30 14:59:38 crc kubenswrapper[4783]: I0930 14:59:38.457088 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-utilities" (OuterVolumeSpecName: "utilities") pod "3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" (UID: "3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:59:38 crc kubenswrapper[4783]: I0930 14:59:38.462623 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-kube-api-access-5dtdj" (OuterVolumeSpecName: "kube-api-access-5dtdj") pod "3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" (UID: "3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4"). InnerVolumeSpecName "kube-api-access-5dtdj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 14:59:38 crc kubenswrapper[4783]: I0930 14:59:38.472212 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" (UID: "3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 14:59:38 crc kubenswrapper[4783]: I0930 14:59:38.557659 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5dtdj\" (UniqueName: \"kubernetes.io/projected/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-kube-api-access-5dtdj\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:38 crc kubenswrapper[4783]: I0930 14:59:38.557687 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:38 crc kubenswrapper[4783]: I0930 14:59:38.557700 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 14:59:38 crc kubenswrapper[4783]: I0930 14:59:38.999797 4783 generic.go:334] "Generic (PLEG): container finished" podID="3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" containerID="f587ba04de2d12f44d2e51614b889525abde440abfdb4c95f34baaa2ec9c405f" exitCode=0 Sep 30 14:59:38 crc kubenswrapper[4783]: I0930 14:59:38.999860 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xxcsh" Sep 30 14:59:38 crc kubenswrapper[4783]: I0930 14:59:38.999881 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xxcsh" event={"ID":"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4","Type":"ContainerDied","Data":"f587ba04de2d12f44d2e51614b889525abde440abfdb4c95f34baaa2ec9c405f"} Sep 30 14:59:39 crc kubenswrapper[4783]: I0930 14:59:38.999943 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xxcsh" event={"ID":"3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4","Type":"ContainerDied","Data":"285b009d3379ea58f4571feb496aa208b48e95fca57bff69297b17e375288212"} Sep 30 14:59:39 crc kubenswrapper[4783]: I0930 14:59:38.999967 4783 scope.go:117] "RemoveContainer" containerID="f587ba04de2d12f44d2e51614b889525abde440abfdb4c95f34baaa2ec9c405f" Sep 30 14:59:39 crc kubenswrapper[4783]: I0930 14:59:39.018894 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xxcsh"] Sep 30 14:59:39 crc kubenswrapper[4783]: I0930 14:59:39.024480 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xxcsh"] Sep 30 14:59:39 crc kubenswrapper[4783]: I0930 14:59:39.029060 4783 scope.go:117] "RemoveContainer" containerID="34729c004557f934c3a46177d6ad53b73ee73d17229e3e1e0c6bba0bd7ed3c2e" Sep 30 14:59:39 crc kubenswrapper[4783]: I0930 14:59:39.044747 4783 scope.go:117] "RemoveContainer" containerID="3f6f897a057bd5c66a16cf1d94b51e613831531ce94251d4f54cd203dc25601e" Sep 30 14:59:39 crc kubenswrapper[4783]: I0930 14:59:39.081117 4783 scope.go:117] "RemoveContainer" containerID="f587ba04de2d12f44d2e51614b889525abde440abfdb4c95f34baaa2ec9c405f" Sep 30 14:59:39 crc kubenswrapper[4783]: E0930 14:59:39.081605 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f587ba04de2d12f44d2e51614b889525abde440abfdb4c95f34baaa2ec9c405f\": container with ID starting with f587ba04de2d12f44d2e51614b889525abde440abfdb4c95f34baaa2ec9c405f not found: ID does not exist" containerID="f587ba04de2d12f44d2e51614b889525abde440abfdb4c95f34baaa2ec9c405f" Sep 30 14:59:39 crc kubenswrapper[4783]: I0930 14:59:39.081650 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f587ba04de2d12f44d2e51614b889525abde440abfdb4c95f34baaa2ec9c405f"} err="failed to get container status \"f587ba04de2d12f44d2e51614b889525abde440abfdb4c95f34baaa2ec9c405f\": rpc error: code = NotFound desc = could not find container \"f587ba04de2d12f44d2e51614b889525abde440abfdb4c95f34baaa2ec9c405f\": container with ID starting with f587ba04de2d12f44d2e51614b889525abde440abfdb4c95f34baaa2ec9c405f not found: ID does not exist" Sep 30 14:59:39 crc kubenswrapper[4783]: I0930 14:59:39.081680 4783 scope.go:117] "RemoveContainer" containerID="34729c004557f934c3a46177d6ad53b73ee73d17229e3e1e0c6bba0bd7ed3c2e" Sep 30 14:59:39 crc kubenswrapper[4783]: E0930 14:59:39.082042 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34729c004557f934c3a46177d6ad53b73ee73d17229e3e1e0c6bba0bd7ed3c2e\": container with ID starting with 34729c004557f934c3a46177d6ad53b73ee73d17229e3e1e0c6bba0bd7ed3c2e not found: ID does not exist" containerID="34729c004557f934c3a46177d6ad53b73ee73d17229e3e1e0c6bba0bd7ed3c2e" Sep 30 14:59:39 crc kubenswrapper[4783]: I0930 14:59:39.082081 4783 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34729c004557f934c3a46177d6ad53b73ee73d17229e3e1e0c6bba0bd7ed3c2e"} err="failed to get container status \"34729c004557f934c3a46177d6ad53b73ee73d17229e3e1e0c6bba0bd7ed3c2e\": rpc error: code = NotFound desc = could not find container \"34729c004557f934c3a46177d6ad53b73ee73d17229e3e1e0c6bba0bd7ed3c2e\": container with ID starting with 34729c004557f934c3a46177d6ad53b73ee73d17229e3e1e0c6bba0bd7ed3c2e not found: ID does not exist" Sep 30 14:59:39 crc kubenswrapper[4783]: I0930 14:59:39.082109 4783 scope.go:117] "RemoveContainer" containerID="3f6f897a057bd5c66a16cf1d94b51e613831531ce94251d4f54cd203dc25601e" Sep 30 14:59:39 crc kubenswrapper[4783]: E0930 14:59:39.082517 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f6f897a057bd5c66a16cf1d94b51e613831531ce94251d4f54cd203dc25601e\": container with ID starting with 3f6f897a057bd5c66a16cf1d94b51e613831531ce94251d4f54cd203dc25601e not found: ID does not exist" containerID="3f6f897a057bd5c66a16cf1d94b51e613831531ce94251d4f54cd203dc25601e" Sep 30 14:59:39 crc kubenswrapper[4783]: I0930 14:59:39.082542 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f6f897a057bd5c66a16cf1d94b51e613831531ce94251d4f54cd203dc25601e"} err="failed to get container status \"3f6f897a057bd5c66a16cf1d94b51e613831531ce94251d4f54cd203dc25601e\": rpc error: code = NotFound desc = could not find container \"3f6f897a057bd5c66a16cf1d94b51e613831531ce94251d4f54cd203dc25601e\": container with ID starting with 3f6f897a057bd5c66a16cf1d94b51e613831531ce94251d4f54cd203dc25601e not found: ID does not exist" Sep 30 14:59:40 crc kubenswrapper[4783]: I0930 14:59:40.858889 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" path="/var/lib/kubelet/pods/3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4/volumes" Sep 30 14:59:58 crc kubenswrapper[4783]: I0930 14:59:58.935097 4783 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","pod59b335c6-05a6-4996-8412-8d06804cb213"] err="unable to destroy cgroup paths for cgroup [kubepods burstable pod59b335c6-05a6-4996-8412-8d06804cb213] : Timed out while waiting for systemd to remove kubepods-burstable-pod59b335c6_05a6_4996_8412_8d06804cb213.slice" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.156735 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp"] Sep 30 15:00:00 crc kubenswrapper[4783]: E0930 15:00:00.157305 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" containerName="extract-utilities" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.157317 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" containerName="extract-utilities" Sep 30 15:00:00 crc kubenswrapper[4783]: E0930 15:00:00.157336 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" containerName="extract-content" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.157343 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" containerName="extract-content" Sep 30 15:00:00 crc kubenswrapper[4783]: E0930 15:00:00.157355 4783 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" containerName="registry-server" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.157361 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" containerName="registry-server" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.157504 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ec8eb44-7d5c-474e-a14b-e5fbae2f17a4" containerName="registry-server" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.158005 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.160901 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.162571 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.165299 4783 generic.go:334] "Generic (PLEG): container finished" podID="ed3bb681-a6c6-4336-8664-36e153896c36" containerID="0b55487d1d21768803c62df09cff2931d664e93c74ab169afb9ac722745b81eb" exitCode=0 Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.165371 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ed3bb681-a6c6-4336-8664-36e153896c36","Type":"ContainerDied","Data":"0b55487d1d21768803c62df09cff2931d664e93c74ab169afb9ac722745b81eb"} Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.176710 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp"] Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.195713 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n79j8\" (UniqueName: \"kubernetes.io/projected/26a16a3a-e2c0-434c-a259-b092947c3a38-kube-api-access-n79j8\") pod \"collect-profiles-29320740-rpsrp\" (UID: \"26a16a3a-e2c0-434c-a259-b092947c3a38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.195785 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/26a16a3a-e2c0-434c-a259-b092947c3a38-secret-volume\") pod \"collect-profiles-29320740-rpsrp\" (UID: \"26a16a3a-e2c0-434c-a259-b092947c3a38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.195884 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/26a16a3a-e2c0-434c-a259-b092947c3a38-config-volume\") pod \"collect-profiles-29320740-rpsrp\" (UID: \"26a16a3a-e2c0-434c-a259-b092947c3a38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.297330 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n79j8\" (UniqueName: \"kubernetes.io/projected/26a16a3a-e2c0-434c-a259-b092947c3a38-kube-api-access-n79j8\") pod \"collect-profiles-29320740-rpsrp\" (UID: \"26a16a3a-e2c0-434c-a259-b092947c3a38\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.298436 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/26a16a3a-e2c0-434c-a259-b092947c3a38-secret-volume\") pod \"collect-profiles-29320740-rpsrp\" (UID: \"26a16a3a-e2c0-434c-a259-b092947c3a38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.298887 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/26a16a3a-e2c0-434c-a259-b092947c3a38-config-volume\") pod \"collect-profiles-29320740-rpsrp\" (UID: \"26a16a3a-e2c0-434c-a259-b092947c3a38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.299913 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/26a16a3a-e2c0-434c-a259-b092947c3a38-config-volume\") pod \"collect-profiles-29320740-rpsrp\" (UID: \"26a16a3a-e2c0-434c-a259-b092947c3a38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.304014 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/26a16a3a-e2c0-434c-a259-b092947c3a38-secret-volume\") pod \"collect-profiles-29320740-rpsrp\" (UID: \"26a16a3a-e2c0-434c-a259-b092947c3a38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.313522 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n79j8\" (UniqueName: \"kubernetes.io/projected/26a16a3a-e2c0-434c-a259-b092947c3a38-kube-api-access-n79j8\") pod \"collect-profiles-29320740-rpsrp\" (UID: \"26a16a3a-e2c0-434c-a259-b092947c3a38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.591892 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" Sep 30 15:00:00 crc kubenswrapper[4783]: I0930 15:00:00.825785 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp"] Sep 30 15:00:00 crc kubenswrapper[4783]: W0930 15:00:00.870272 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26a16a3a_e2c0_434c_a259_b092947c3a38.slice/crio-739f261526d4e7784d42a29bbaf0faba8c99cdb3b170ec08feb1aa0430bf86c4 WatchSource:0}: Error finding container 739f261526d4e7784d42a29bbaf0faba8c99cdb3b170ec08feb1aa0430bf86c4: Status 404 returned error can't find the container with id 739f261526d4e7784d42a29bbaf0faba8c99cdb3b170ec08feb1aa0430bf86c4 Sep 30 15:00:01 crc kubenswrapper[4783]: I0930 15:00:01.177422 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" event={"ID":"26a16a3a-e2c0-434c-a259-b092947c3a38","Type":"ContainerStarted","Data":"2cff7f309622fb125d3344585e5b1928907a546f2d144595280131991b82cb62"} Sep 30 15:00:01 crc kubenswrapper[4783]: I0930 15:00:01.177526 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" event={"ID":"26a16a3a-e2c0-434c-a259-b092947c3a38","Type":"ContainerStarted","Data":"739f261526d4e7784d42a29bbaf0faba8c99cdb3b170ec08feb1aa0430bf86c4"} Sep 30 15:00:01 crc kubenswrapper[4783]: I0930 15:00:01.180391 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ed3bb681-a6c6-4336-8664-36e153896c36","Type":"ContainerStarted","Data":"3e713570eb7bde745188168923d2b881ccdfc290b0b209462bd0c46997bb3b31"} Sep 30 15:00:01 crc kubenswrapper[4783]: I0930 15:00:01.181193 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 30 15:00:01 crc kubenswrapper[4783]: I0930 15:00:01.182593 4783 generic.go:334] "Generic (PLEG): container finished" podID="e175dc5c-270b-46c8-b5cb-d95fab2b9a92" containerID="9e7b77b1e77034a54fad9a41bbfcf42647c8315c832f0bbb68a1ddd88624af04" exitCode=0 Sep 30 15:00:01 crc kubenswrapper[4783]: I0930 15:00:01.182695 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e175dc5c-270b-46c8-b5cb-d95fab2b9a92","Type":"ContainerDied","Data":"9e7b77b1e77034a54fad9a41bbfcf42647c8315c832f0bbb68a1ddd88624af04"} Sep 30 15:00:01 crc kubenswrapper[4783]: I0930 15:00:01.241724 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=34.241696219 podStartE2EDuration="34.241696219s" podCreationTimestamp="2025-09-30 14:59:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:00:01.234958563 +0000 UTC m=+5101.166424880" watchObservedRunningTime="2025-09-30 15:00:01.241696219 +0000 UTC m=+5101.173162536" Sep 30 15:00:01 crc kubenswrapper[4783]: I0930 15:00:01.248521 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" podStartSLOduration=1.248491286 podStartE2EDuration="1.248491286s" podCreationTimestamp="2025-09-30 15:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-09-30 15:00:01.205346005 +0000 UTC m=+5101.136812322" watchObservedRunningTime="2025-09-30 15:00:01.248491286 +0000 UTC m=+5101.179957603" Sep 30 15:00:02 crc kubenswrapper[4783]: I0930 15:00:02.193075 4783 generic.go:334] "Generic (PLEG): container finished" podID="26a16a3a-e2c0-434c-a259-b092947c3a38" containerID="2cff7f309622fb125d3344585e5b1928907a546f2d144595280131991b82cb62" exitCode=0 Sep 30 15:00:02 crc kubenswrapper[4783]: I0930 15:00:02.193154 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" event={"ID":"26a16a3a-e2c0-434c-a259-b092947c3a38","Type":"ContainerDied","Data":"2cff7f309622fb125d3344585e5b1928907a546f2d144595280131991b82cb62"} Sep 30 15:00:02 crc kubenswrapper[4783]: I0930 15:00:02.196761 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e175dc5c-270b-46c8-b5cb-d95fab2b9a92","Type":"ContainerStarted","Data":"e95361e2a222657a96ba2469803bd5cfa948bfde70313ae9b5d45bf1fe206cba"} Sep 30 15:00:02 crc kubenswrapper[4783]: I0930 15:00:02.197297 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 30 15:00:03 crc kubenswrapper[4783]: I0930 15:00:03.509248 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" Sep 30 15:00:03 crc kubenswrapper[4783]: I0930 15:00:03.524556 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=35.524538697 podStartE2EDuration="35.524538697s" podCreationTimestamp="2025-09-30 14:59:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:00:02.247441329 +0000 UTC m=+5102.178907636" watchObservedRunningTime="2025-09-30 15:00:03.524538697 +0000 UTC m=+5103.456005004" Sep 30 15:00:03 crc kubenswrapper[4783]: I0930 15:00:03.654969 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n79j8\" (UniqueName: \"kubernetes.io/projected/26a16a3a-e2c0-434c-a259-b092947c3a38-kube-api-access-n79j8\") pod \"26a16a3a-e2c0-434c-a259-b092947c3a38\" (UID: \"26a16a3a-e2c0-434c-a259-b092947c3a38\") " Sep 30 15:00:03 crc kubenswrapper[4783]: I0930 15:00:03.655020 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/26a16a3a-e2c0-434c-a259-b092947c3a38-secret-volume\") pod \"26a16a3a-e2c0-434c-a259-b092947c3a38\" (UID: \"26a16a3a-e2c0-434c-a259-b092947c3a38\") " Sep 30 15:00:03 crc kubenswrapper[4783]: I0930 15:00:03.655762 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26a16a3a-e2c0-434c-a259-b092947c3a38-config-volume" (OuterVolumeSpecName: "config-volume") pod "26a16a3a-e2c0-434c-a259-b092947c3a38" (UID: "26a16a3a-e2c0-434c-a259-b092947c3a38"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 15:00:03 crc kubenswrapper[4783]: I0930 15:00:03.655169 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/26a16a3a-e2c0-434c-a259-b092947c3a38-config-volume\") pod \"26a16a3a-e2c0-434c-a259-b092947c3a38\" (UID: \"26a16a3a-e2c0-434c-a259-b092947c3a38\") " Sep 30 15:00:03 crc kubenswrapper[4783]: I0930 15:00:03.656344 4783 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/26a16a3a-e2c0-434c-a259-b092947c3a38-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 15:00:03 crc kubenswrapper[4783]: I0930 15:00:03.660038 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26a16a3a-e2c0-434c-a259-b092947c3a38-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "26a16a3a-e2c0-434c-a259-b092947c3a38" (UID: "26a16a3a-e2c0-434c-a259-b092947c3a38"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 15:00:03 crc kubenswrapper[4783]: I0930 15:00:03.662490 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26a16a3a-e2c0-434c-a259-b092947c3a38-kube-api-access-n79j8" (OuterVolumeSpecName: "kube-api-access-n79j8") pod "26a16a3a-e2c0-434c-a259-b092947c3a38" (UID: "26a16a3a-e2c0-434c-a259-b092947c3a38"). InnerVolumeSpecName "kube-api-access-n79j8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:00:03 crc kubenswrapper[4783]: I0930 15:00:03.758446 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n79j8\" (UniqueName: \"kubernetes.io/projected/26a16a3a-e2c0-434c-a259-b092947c3a38-kube-api-access-n79j8\") on node \"crc\" DevicePath \"\"" Sep 30 15:00:03 crc kubenswrapper[4783]: I0930 15:00:03.758500 4783 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/26a16a3a-e2c0-434c-a259-b092947c3a38-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 15:00:04 crc kubenswrapper[4783]: I0930 15:00:04.213046 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" event={"ID":"26a16a3a-e2c0-434c-a259-b092947c3a38","Type":"ContainerDied","Data":"739f261526d4e7784d42a29bbaf0faba8c99cdb3b170ec08feb1aa0430bf86c4"} Sep 30 15:00:04 crc kubenswrapper[4783]: I0930 15:00:04.213116 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="739f261526d4e7784d42a29bbaf0faba8c99cdb3b170ec08feb1aa0430bf86c4" Sep 30 15:00:04 crc kubenswrapper[4783]: I0930 15:00:04.213168 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320740-rpsrp" Sep 30 15:00:04 crc kubenswrapper[4783]: I0930 15:00:04.295291 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr"] Sep 30 15:00:04 crc kubenswrapper[4783]: I0930 15:00:04.300926 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320695-ghknr"] Sep 30 15:00:04 crc kubenswrapper[4783]: I0930 15:00:04.853188 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a311fff-dc1c-46a7-9e74-ee4c03b630b2" path="/var/lib/kubelet/pods/4a311fff-dc1c-46a7-9e74-ee4c03b630b2/volumes" Sep 30 15:00:07 crc kubenswrapper[4783]: I0930 15:00:07.674569 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 15:00:07 crc kubenswrapper[4783]: I0930 15:00:07.674906 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 15:00:18 crc kubenswrapper[4783]: I0930 15:00:18.474543 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 15:00:18 crc kubenswrapper[4783]: I0930 15:00:18.832172 4783 scope.go:117] "RemoveContainer" containerID="997d648a041d3101eed6b0cfe6096fad2ee17ee6880115ad6af411abea50eebe" Sep 30 15:00:19 crc kubenswrapper[4783]: I0930 15:00:19.434603 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 15:00:20 crc kubenswrapper[4783]: I0930 15:00:20.446426 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"] Sep 30 15:00:20 crc kubenswrapper[4783]: E0930 15:00:20.446862 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26a16a3a-e2c0-434c-a259-b092947c3a38" containerName="collect-profiles" Sep 30 15:00:20 crc kubenswrapper[4783]: I0930 15:00:20.446881 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="26a16a3a-e2c0-434c-a259-b092947c3a38" containerName="collect-profiles" Sep 30 15:00:20 crc kubenswrapper[4783]: I0930 15:00:20.447177 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="26a16a3a-e2c0-434c-a259-b092947c3a38" containerName="collect-profiles" Sep 30 15:00:20 crc kubenswrapper[4783]: I0930 15:00:20.447995 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Sep 30 15:00:20 crc kubenswrapper[4783]: I0930 15:00:20.450145 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-k622k" Sep 30 15:00:20 crc kubenswrapper[4783]: I0930 15:00:20.454678 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Sep 30 15:00:20 crc kubenswrapper[4783]: I0930 15:00:20.517044 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mscnt\" (UniqueName: \"kubernetes.io/projected/07429c36-5065-4ede-aa51-f6bcd146dfc2-kube-api-access-mscnt\") pod \"mariadb-client-1-default\" (UID: \"07429c36-5065-4ede-aa51-f6bcd146dfc2\") " pod="openstack/mariadb-client-1-default" Sep 30 15:00:20 crc kubenswrapper[4783]: I0930 15:00:20.618148 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mscnt\" (UniqueName: \"kubernetes.io/projected/07429c36-5065-4ede-aa51-f6bcd146dfc2-kube-api-access-mscnt\") pod \"mariadb-client-1-default\" (UID: \"07429c36-5065-4ede-aa51-f6bcd146dfc2\") " pod="openstack/mariadb-client-1-default" Sep 30 15:00:20 crc kubenswrapper[4783]: I0930 15:00:20.640843 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mscnt\" (UniqueName: \"kubernetes.io/projected/07429c36-5065-4ede-aa51-f6bcd146dfc2-kube-api-access-mscnt\") pod \"mariadb-client-1-default\" (UID: \"07429c36-5065-4ede-aa51-f6bcd146dfc2\") " pod="openstack/mariadb-client-1-default" Sep 30 15:00:20 crc kubenswrapper[4783]: I0930 15:00:20.769628 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Sep 30 15:00:21 crc kubenswrapper[4783]: I0930 15:00:21.290151 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Sep 30 15:00:21 crc kubenswrapper[4783]: W0930 15:00:21.296171 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07429c36_5065_4ede_aa51_f6bcd146dfc2.slice/crio-a7bade297359c92684cf01c8129bf985fd6ce024c4a319c36dafdc769620c4e2 WatchSource:0}: Error finding container a7bade297359c92684cf01c8129bf985fd6ce024c4a319c36dafdc769620c4e2: Status 404 returned error can't find the container with id a7bade297359c92684cf01c8129bf985fd6ce024c4a319c36dafdc769620c4e2 Sep 30 15:00:21 crc kubenswrapper[4783]: I0930 15:00:21.341181 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"07429c36-5065-4ede-aa51-f6bcd146dfc2","Type":"ContainerStarted","Data":"a7bade297359c92684cf01c8129bf985fd6ce024c4a319c36dafdc769620c4e2"} Sep 30 15:00:23 crc kubenswrapper[4783]: I0930 15:00:23.362421 4783 generic.go:334] "Generic (PLEG): container finished" podID="07429c36-5065-4ede-aa51-f6bcd146dfc2" containerID="d1731c62c6cc3f5285771443bf5f9f0ebf28266664d8114aae801b325a251504" exitCode=0 Sep 30 15:00:23 crc kubenswrapper[4783]: I0930 15:00:23.362535 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"07429c36-5065-4ede-aa51-f6bcd146dfc2","Type":"ContainerDied","Data":"d1731c62c6cc3f5285771443bf5f9f0ebf28266664d8114aae801b325a251504"} Sep 30 15:00:24 crc kubenswrapper[4783]: I0930 15:00:24.767700 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Sep 30 15:00:24 crc kubenswrapper[4783]: I0930 15:00:24.796537 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_07429c36-5065-4ede-aa51-f6bcd146dfc2/mariadb-client-1-default/0.log" Sep 30 15:00:24 crc kubenswrapper[4783]: I0930 15:00:24.819110 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"] Sep 30 15:00:24 crc kubenswrapper[4783]: I0930 15:00:24.824774 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"] Sep 30 15:00:24 crc kubenswrapper[4783]: I0930 15:00:24.886201 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mscnt\" (UniqueName: \"kubernetes.io/projected/07429c36-5065-4ede-aa51-f6bcd146dfc2-kube-api-access-mscnt\") pod \"07429c36-5065-4ede-aa51-f6bcd146dfc2\" (UID: \"07429c36-5065-4ede-aa51-f6bcd146dfc2\") " Sep 30 15:00:24 crc kubenswrapper[4783]: I0930 15:00:24.891370 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07429c36-5065-4ede-aa51-f6bcd146dfc2-kube-api-access-mscnt" (OuterVolumeSpecName: "kube-api-access-mscnt") pod "07429c36-5065-4ede-aa51-f6bcd146dfc2" (UID: "07429c36-5065-4ede-aa51-f6bcd146dfc2"). InnerVolumeSpecName "kube-api-access-mscnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:00:24 crc kubenswrapper[4783]: I0930 15:00:24.987352 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mscnt\" (UniqueName: \"kubernetes.io/projected/07429c36-5065-4ede-aa51-f6bcd146dfc2-kube-api-access-mscnt\") on node \"crc\" DevicePath \"\"" Sep 30 15:00:25 crc kubenswrapper[4783]: I0930 15:00:25.304589 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"] Sep 30 15:00:25 crc kubenswrapper[4783]: E0930 15:00:25.305094 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07429c36-5065-4ede-aa51-f6bcd146dfc2" containerName="mariadb-client-1-default" Sep 30 15:00:25 crc kubenswrapper[4783]: I0930 15:00:25.305119 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="07429c36-5065-4ede-aa51-f6bcd146dfc2" containerName="mariadb-client-1-default" Sep 30 15:00:25 crc kubenswrapper[4783]: I0930 15:00:25.305375 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="07429c36-5065-4ede-aa51-f6bcd146dfc2" containerName="mariadb-client-1-default" Sep 30 15:00:25 crc kubenswrapper[4783]: I0930 15:00:25.306086 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Sep 30 15:00:25 crc kubenswrapper[4783]: I0930 15:00:25.330564 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Sep 30 15:00:25 crc kubenswrapper[4783]: I0930 15:00:25.382110 4783 scope.go:117] "RemoveContainer" containerID="d1731c62c6cc3f5285771443bf5f9f0ebf28266664d8114aae801b325a251504" Sep 30 15:00:25 crc kubenswrapper[4783]: I0930 15:00:25.382187 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Sep 30 15:00:25 crc kubenswrapper[4783]: I0930 15:00:25.394545 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkqrd\" (UniqueName: \"kubernetes.io/projected/6e519a0e-8593-475f-9835-0a0f9ca21d08-kube-api-access-jkqrd\") pod \"mariadb-client-2-default\" (UID: \"6e519a0e-8593-475f-9835-0a0f9ca21d08\") " pod="openstack/mariadb-client-2-default" Sep 30 15:00:25 crc kubenswrapper[4783]: I0930 15:00:25.497014 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkqrd\" (UniqueName: \"kubernetes.io/projected/6e519a0e-8593-475f-9835-0a0f9ca21d08-kube-api-access-jkqrd\") pod \"mariadb-client-2-default\" (UID: \"6e519a0e-8593-475f-9835-0a0f9ca21d08\") " pod="openstack/mariadb-client-2-default" Sep 30 15:00:25 crc kubenswrapper[4783]: I0930 15:00:25.517178 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkqrd\" (UniqueName: \"kubernetes.io/projected/6e519a0e-8593-475f-9835-0a0f9ca21d08-kube-api-access-jkqrd\") pod \"mariadb-client-2-default\" (UID: \"6e519a0e-8593-475f-9835-0a0f9ca21d08\") " pod="openstack/mariadb-client-2-default" Sep 30 15:00:25 crc kubenswrapper[4783]: I0930 15:00:25.629505 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Sep 30 15:00:26 crc kubenswrapper[4783]: I0930 15:00:26.161964 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Sep 30 15:00:26 crc kubenswrapper[4783]: W0930 15:00:26.163119 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e519a0e_8593_475f_9835_0a0f9ca21d08.slice/crio-40e432c8969c5bf2edd2ffd19dc0eae2c7be69db971126559dc73c12935a371c WatchSource:0}: Error finding container 40e432c8969c5bf2edd2ffd19dc0eae2c7be69db971126559dc73c12935a371c: Status 404 returned error can't find the container with id 40e432c8969c5bf2edd2ffd19dc0eae2c7be69db971126559dc73c12935a371c Sep 30 15:00:26 crc kubenswrapper[4783]: I0930 15:00:26.394266 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"6e519a0e-8593-475f-9835-0a0f9ca21d08","Type":"ContainerStarted","Data":"40e432c8969c5bf2edd2ffd19dc0eae2c7be69db971126559dc73c12935a371c"} Sep 30 15:00:26 crc kubenswrapper[4783]: I0930 15:00:26.860718 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07429c36-5065-4ede-aa51-f6bcd146dfc2" path="/var/lib/kubelet/pods/07429c36-5065-4ede-aa51-f6bcd146dfc2/volumes" Sep 30 15:00:27 crc kubenswrapper[4783]: I0930 15:00:27.402138 4783 generic.go:334] "Generic (PLEG): container finished" podID="6e519a0e-8593-475f-9835-0a0f9ca21d08" containerID="b352dd1708ffe963c936f96c1dc20d8db78cdea7a42ff7a1f430a571af3a2d66" exitCode=0 Sep 30 15:00:27 crc kubenswrapper[4783]: I0930 15:00:27.402198 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"6e519a0e-8593-475f-9835-0a0f9ca21d08","Type":"ContainerDied","Data":"b352dd1708ffe963c936f96c1dc20d8db78cdea7a42ff7a1f430a571af3a2d66"} Sep 30 15:00:28 crc kubenswrapper[4783]: I0930 15:00:28.799702 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Sep 30 15:00:28 crc kubenswrapper[4783]: I0930 15:00:28.846068 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2-default_6e519a0e-8593-475f-9835-0a0f9ca21d08/mariadb-client-2-default/0.log" Sep 30 15:00:28 crc kubenswrapper[4783]: I0930 15:00:28.849110 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkqrd\" (UniqueName: \"kubernetes.io/projected/6e519a0e-8593-475f-9835-0a0f9ca21d08-kube-api-access-jkqrd\") pod \"6e519a0e-8593-475f-9835-0a0f9ca21d08\" (UID: \"6e519a0e-8593-475f-9835-0a0f9ca21d08\") " Sep 30 15:00:28 crc kubenswrapper[4783]: I0930 15:00:28.854251 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e519a0e-8593-475f-9835-0a0f9ca21d08-kube-api-access-jkqrd" (OuterVolumeSpecName: "kube-api-access-jkqrd") pod "6e519a0e-8593-475f-9835-0a0f9ca21d08" (UID: "6e519a0e-8593-475f-9835-0a0f9ca21d08"). InnerVolumeSpecName "kube-api-access-jkqrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:00:28 crc kubenswrapper[4783]: I0930 15:00:28.875021 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"] Sep 30 15:00:28 crc kubenswrapper[4783]: I0930 15:00:28.884549 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"] Sep 30 15:00:28 crc kubenswrapper[4783]: I0930 15:00:28.952623 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkqrd\" (UniqueName: \"kubernetes.io/projected/6e519a0e-8593-475f-9835-0a0f9ca21d08-kube-api-access-jkqrd\") on node \"crc\" DevicePath \"\"" Sep 30 15:00:29 crc kubenswrapper[4783]: I0930 15:00:29.319744 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"] Sep 30 15:00:29 crc kubenswrapper[4783]: E0930 15:00:29.320485 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e519a0e-8593-475f-9835-0a0f9ca21d08" containerName="mariadb-client-2-default" Sep 30 15:00:29 crc kubenswrapper[4783]: I0930 15:00:29.320573 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e519a0e-8593-475f-9835-0a0f9ca21d08" containerName="mariadb-client-2-default" Sep 30 15:00:29 crc kubenswrapper[4783]: I0930 15:00:29.320863 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e519a0e-8593-475f-9835-0a0f9ca21d08" containerName="mariadb-client-2-default" Sep 30 15:00:29 crc kubenswrapper[4783]: I0930 15:00:29.321586 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Sep 30 15:00:29 crc kubenswrapper[4783]: I0930 15:00:29.331544 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Sep 30 15:00:29 crc kubenswrapper[4783]: I0930 15:00:29.359726 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pdmg\" (UniqueName: \"kubernetes.io/projected/b464dfdc-e5ac-4563-8558-9f77fbebcc40-kube-api-access-4pdmg\") pod \"mariadb-client-1\" (UID: \"b464dfdc-e5ac-4563-8558-9f77fbebcc40\") " pod="openstack/mariadb-client-1" Sep 30 15:00:29 crc kubenswrapper[4783]: I0930 15:00:29.425215 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="40e432c8969c5bf2edd2ffd19dc0eae2c7be69db971126559dc73c12935a371c" Sep 30 15:00:29 crc kubenswrapper[4783]: I0930 15:00:29.425610 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Sep 30 15:00:29 crc kubenswrapper[4783]: I0930 15:00:29.461285 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pdmg\" (UniqueName: \"kubernetes.io/projected/b464dfdc-e5ac-4563-8558-9f77fbebcc40-kube-api-access-4pdmg\") pod \"mariadb-client-1\" (UID: \"b464dfdc-e5ac-4563-8558-9f77fbebcc40\") " pod="openstack/mariadb-client-1" Sep 30 15:00:29 crc kubenswrapper[4783]: I0930 15:00:29.483451 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pdmg\" (UniqueName: \"kubernetes.io/projected/b464dfdc-e5ac-4563-8558-9f77fbebcc40-kube-api-access-4pdmg\") pod \"mariadb-client-1\" (UID: \"b464dfdc-e5ac-4563-8558-9f77fbebcc40\") " pod="openstack/mariadb-client-1" Sep 30 15:00:29 crc kubenswrapper[4783]: I0930 15:00:29.638888 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Sep 30 15:00:30 crc kubenswrapper[4783]: I0930 15:00:30.124836 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Sep 30 15:00:30 crc kubenswrapper[4783]: W0930 15:00:30.132399 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb464dfdc_e5ac_4563_8558_9f77fbebcc40.slice/crio-12a4526a9d45d6d505e525123e79c59e740e6ed6c3c8154acb571082052a5b00 WatchSource:0}: Error finding container 12a4526a9d45d6d505e525123e79c59e740e6ed6c3c8154acb571082052a5b00: Status 404 returned error can't find the container with id 12a4526a9d45d6d505e525123e79c59e740e6ed6c3c8154acb571082052a5b00 Sep 30 15:00:30 crc kubenswrapper[4783]: I0930 15:00:30.433552 4783 generic.go:334] "Generic (PLEG): container finished" podID="b464dfdc-e5ac-4563-8558-9f77fbebcc40" containerID="c41b7ae90cd2eeb11e53e5cfbcc315050dc20d7a6b532932264c216db218f1bf" exitCode=0 Sep 30 15:00:30 crc kubenswrapper[4783]: I0930 15:00:30.433651 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"b464dfdc-e5ac-4563-8558-9f77fbebcc40","Type":"ContainerDied","Data":"c41b7ae90cd2eeb11e53e5cfbcc315050dc20d7a6b532932264c216db218f1bf"} Sep 30 15:00:30 crc kubenswrapper[4783]: I0930 15:00:30.433908 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"b464dfdc-e5ac-4563-8558-9f77fbebcc40","Type":"ContainerStarted","Data":"12a4526a9d45d6d505e525123e79c59e740e6ed6c3c8154acb571082052a5b00"} Sep 30 15:00:30 crc kubenswrapper[4783]: I0930 15:00:30.856511 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e519a0e-8593-475f-9835-0a0f9ca21d08" path="/var/lib/kubelet/pods/6e519a0e-8593-475f-9835-0a0f9ca21d08/volumes" Sep 30 15:00:31 crc kubenswrapper[4783]: I0930 15:00:31.839258 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Sep 30 15:00:31 crc kubenswrapper[4783]: I0930 15:00:31.855321 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_b464dfdc-e5ac-4563-8558-9f77fbebcc40/mariadb-client-1/0.log" Sep 30 15:00:31 crc kubenswrapper[4783]: I0930 15:00:31.876276 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"] Sep 30 15:00:31 crc kubenswrapper[4783]: I0930 15:00:31.880985 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"] Sep 30 15:00:31 crc kubenswrapper[4783]: I0930 15:00:31.897625 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pdmg\" (UniqueName: \"kubernetes.io/projected/b464dfdc-e5ac-4563-8558-9f77fbebcc40-kube-api-access-4pdmg\") pod \"b464dfdc-e5ac-4563-8558-9f77fbebcc40\" (UID: \"b464dfdc-e5ac-4563-8558-9f77fbebcc40\") " Sep 30 15:00:31 crc kubenswrapper[4783]: I0930 15:00:31.903527 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b464dfdc-e5ac-4563-8558-9f77fbebcc40-kube-api-access-4pdmg" (OuterVolumeSpecName: "kube-api-access-4pdmg") pod "b464dfdc-e5ac-4563-8558-9f77fbebcc40" (UID: "b464dfdc-e5ac-4563-8558-9f77fbebcc40"). InnerVolumeSpecName "kube-api-access-4pdmg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:00:31 crc kubenswrapper[4783]: I0930 15:00:31.999315 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pdmg\" (UniqueName: \"kubernetes.io/projected/b464dfdc-e5ac-4563-8558-9f77fbebcc40-kube-api-access-4pdmg\") on node \"crc\" DevicePath \"\"" Sep 30 15:00:32 crc kubenswrapper[4783]: I0930 15:00:32.295469 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"] Sep 30 15:00:32 crc kubenswrapper[4783]: E0930 15:00:32.295923 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b464dfdc-e5ac-4563-8558-9f77fbebcc40" containerName="mariadb-client-1" Sep 30 15:00:32 crc kubenswrapper[4783]: I0930 15:00:32.295948 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="b464dfdc-e5ac-4563-8558-9f77fbebcc40" containerName="mariadb-client-1" Sep 30 15:00:32 crc kubenswrapper[4783]: I0930 15:00:32.296152 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="b464dfdc-e5ac-4563-8558-9f77fbebcc40" containerName="mariadb-client-1" Sep 30 15:00:32 crc kubenswrapper[4783]: I0930 15:00:32.296910 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Sep 30 15:00:32 crc kubenswrapper[4783]: I0930 15:00:32.309505 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Sep 30 15:00:32 crc kubenswrapper[4783]: I0930 15:00:32.405932 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x2xt\" (UniqueName: \"kubernetes.io/projected/6ecea901-cefb-454a-95e7-2575abeef0c3-kube-api-access-6x2xt\") pod \"mariadb-client-4-default\" (UID: \"6ecea901-cefb-454a-95e7-2575abeef0c3\") " pod="openstack/mariadb-client-4-default" Sep 30 15:00:32 crc kubenswrapper[4783]: I0930 15:00:32.453880 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12a4526a9d45d6d505e525123e79c59e740e6ed6c3c8154acb571082052a5b00" Sep 30 15:00:32 crc kubenswrapper[4783]: I0930 15:00:32.453956 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Sep 30 15:00:32 crc kubenswrapper[4783]: I0930 15:00:32.508346 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x2xt\" (UniqueName: \"kubernetes.io/projected/6ecea901-cefb-454a-95e7-2575abeef0c3-kube-api-access-6x2xt\") pod \"mariadb-client-4-default\" (UID: \"6ecea901-cefb-454a-95e7-2575abeef0c3\") " pod="openstack/mariadb-client-4-default" Sep 30 15:00:32 crc kubenswrapper[4783]: I0930 15:00:32.527468 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x2xt\" (UniqueName: \"kubernetes.io/projected/6ecea901-cefb-454a-95e7-2575abeef0c3-kube-api-access-6x2xt\") pod \"mariadb-client-4-default\" (UID: \"6ecea901-cefb-454a-95e7-2575abeef0c3\") " pod="openstack/mariadb-client-4-default" Sep 30 15:00:32 crc kubenswrapper[4783]: I0930 15:00:32.622669 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Sep 30 15:00:32 crc kubenswrapper[4783]: I0930 15:00:32.856360 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b464dfdc-e5ac-4563-8558-9f77fbebcc40" path="/var/lib/kubelet/pods/b464dfdc-e5ac-4563-8558-9f77fbebcc40/volumes" Sep 30 15:00:33 crc kubenswrapper[4783]: I0930 15:00:33.168133 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Sep 30 15:00:33 crc kubenswrapper[4783]: I0930 15:00:33.462762 4783 generic.go:334] "Generic (PLEG): container finished" podID="6ecea901-cefb-454a-95e7-2575abeef0c3" containerID="a142eefc32e30ef4b170b0f51d837aab086b45b10515b5318c56dbb875d44cc4" exitCode=0 Sep 30 15:00:33 crc kubenswrapper[4783]: I0930 15:00:33.462896 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"6ecea901-cefb-454a-95e7-2575abeef0c3","Type":"ContainerDied","Data":"a142eefc32e30ef4b170b0f51d837aab086b45b10515b5318c56dbb875d44cc4"} Sep 30 15:00:33 crc kubenswrapper[4783]: I0930 15:00:33.463386 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"6ecea901-cefb-454a-95e7-2575abeef0c3","Type":"ContainerStarted","Data":"46396b636892cb56409dcc7e4711f236a699036b96762302bfea57f51de51bf1"} Sep 30 15:00:34 crc kubenswrapper[4783]: I0930 15:00:34.892875 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Sep 30 15:00:34 crc kubenswrapper[4783]: I0930 15:00:34.914032 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_6ecea901-cefb-454a-95e7-2575abeef0c3/mariadb-client-4-default/0.log" Sep 30 15:00:34 crc kubenswrapper[4783]: I0930 15:00:34.952623 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"] Sep 30 15:00:34 crc kubenswrapper[4783]: I0930 15:00:34.957260 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6x2xt\" (UniqueName: \"kubernetes.io/projected/6ecea901-cefb-454a-95e7-2575abeef0c3-kube-api-access-6x2xt\") pod \"6ecea901-cefb-454a-95e7-2575abeef0c3\" (UID: \"6ecea901-cefb-454a-95e7-2575abeef0c3\") " Sep 30 15:00:34 crc kubenswrapper[4783]: I0930 15:00:34.961761 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"] Sep 30 15:00:34 crc kubenswrapper[4783]: I0930 15:00:34.966102 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ecea901-cefb-454a-95e7-2575abeef0c3-kube-api-access-6x2xt" (OuterVolumeSpecName: "kube-api-access-6x2xt") pod "6ecea901-cefb-454a-95e7-2575abeef0c3" (UID: "6ecea901-cefb-454a-95e7-2575abeef0c3"). InnerVolumeSpecName "kube-api-access-6x2xt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:00:35 crc kubenswrapper[4783]: I0930 15:00:35.059095 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6x2xt\" (UniqueName: \"kubernetes.io/projected/6ecea901-cefb-454a-95e7-2575abeef0c3-kube-api-access-6x2xt\") on node \"crc\" DevicePath \"\"" Sep 30 15:00:35 crc kubenswrapper[4783]: I0930 15:00:35.484878 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46396b636892cb56409dcc7e4711f236a699036b96762302bfea57f51de51bf1" Sep 30 15:00:35 crc kubenswrapper[4783]: I0930 15:00:35.484998 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Sep 30 15:00:36 crc kubenswrapper[4783]: I0930 15:00:36.857340 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ecea901-cefb-454a-95e7-2575abeef0c3" path="/var/lib/kubelet/pods/6ecea901-cefb-454a-95e7-2575abeef0c3/volumes" Sep 30 15:00:37 crc kubenswrapper[4783]: I0930 15:00:37.674747 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 15:00:37 crc kubenswrapper[4783]: I0930 15:00:37.674874 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 15:00:37 crc kubenswrapper[4783]: I0930 15:00:37.674944 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 15:00:37 crc kubenswrapper[4783]: I0930 15:00:37.676162 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 15:00:37 crc kubenswrapper[4783]: I0930 15:00:37.676307 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" gracePeriod=600 Sep 30 15:00:37 crc kubenswrapper[4783]: E0930 15:00:37.802482 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:00:38 crc kubenswrapper[4783]: I0930 15:00:38.513180 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" exitCode=0 Sep 30 15:00:38 crc kubenswrapper[4783]: I0930 15:00:38.513265 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06"} Sep 30 15:00:38 crc kubenswrapper[4783]: I0930 15:00:38.513322 4783 scope.go:117] "RemoveContainer" containerID="e4bf56c617529c86546c173be89686625c4a0389c7bae70cacf09b2d80df50d4" Sep 30 15:00:38 crc kubenswrapper[4783]: I0930 15:00:38.513806 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:00:38 
Sep 30 15:00:38 crc kubenswrapper[4783]: E0930 15:00:38.514136 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb"
Sep 30 15:00:39 crc kubenswrapper[4783]: I0930 15:00:39.232549 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"]
Sep 30 15:00:39 crc kubenswrapper[4783]: E0930 15:00:39.232971 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ecea901-cefb-454a-95e7-2575abeef0c3" containerName="mariadb-client-4-default"
Sep 30 15:00:39 crc kubenswrapper[4783]: I0930 15:00:39.232994 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ecea901-cefb-454a-95e7-2575abeef0c3" containerName="mariadb-client-4-default"
Sep 30 15:00:39 crc kubenswrapper[4783]: I0930 15:00:39.233201 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ecea901-cefb-454a-95e7-2575abeef0c3" containerName="mariadb-client-4-default"
Sep 30 15:00:39 crc kubenswrapper[4783]: I0930 15:00:39.233833 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default"
Sep 30 15:00:39 crc kubenswrapper[4783]: I0930 15:00:39.238763 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"]
Sep 30 15:00:39 crc kubenswrapper[4783]: I0930 15:00:39.238869 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-k622k"
Sep 30 15:00:39 crc kubenswrapper[4783]: I0930 15:00:39.325328 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js6d7\" (UniqueName: \"kubernetes.io/projected/0e6375d6-1170-409e-950a-10784c1ebb3a-kube-api-access-js6d7\") pod \"mariadb-client-5-default\" (UID: \"0e6375d6-1170-409e-950a-10784c1ebb3a\") " pod="openstack/mariadb-client-5-default"
Sep 30 15:00:39 crc kubenswrapper[4783]: I0930 15:00:39.427488 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js6d7\" (UniqueName: \"kubernetes.io/projected/0e6375d6-1170-409e-950a-10784c1ebb3a-kube-api-access-js6d7\") pod \"mariadb-client-5-default\" (UID: \"0e6375d6-1170-409e-950a-10784c1ebb3a\") " pod="openstack/mariadb-client-5-default"
Sep 30 15:00:39 crc kubenswrapper[4783]: I0930 15:00:39.449820 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js6d7\" (UniqueName: \"kubernetes.io/projected/0e6375d6-1170-409e-950a-10784c1ebb3a-kube-api-access-js6d7\") pod \"mariadb-client-5-default\" (UID: \"0e6375d6-1170-409e-950a-10784c1ebb3a\") " pod="openstack/mariadb-client-5-default"
Sep 30 15:00:39 crc kubenswrapper[4783]: I0930 15:00:39.560272 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Sep 30 15:00:40 crc kubenswrapper[4783]: I0930 15:00:40.060549 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Sep 30 15:00:40 crc kubenswrapper[4783]: I0930 15:00:40.531159 4783 generic.go:334] "Generic (PLEG): container finished" podID="0e6375d6-1170-409e-950a-10784c1ebb3a" containerID="9e1a8c7269f4c394d200b1aec28eaf63800e57021d525e2e45d8dc4f1017b613" exitCode=0 Sep 30 15:00:40 crc kubenswrapper[4783]: I0930 15:00:40.531202 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"0e6375d6-1170-409e-950a-10784c1ebb3a","Type":"ContainerDied","Data":"9e1a8c7269f4c394d200b1aec28eaf63800e57021d525e2e45d8dc4f1017b613"} Sep 30 15:00:40 crc kubenswrapper[4783]: I0930 15:00:40.531243 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"0e6375d6-1170-409e-950a-10784c1ebb3a","Type":"ContainerStarted","Data":"c2932f49a2a788e60decc43cd9d99cabeefca9cc7eba680bc142089a3fe88ed4"} Sep 30 15:00:41 crc kubenswrapper[4783]: I0930 15:00:41.872347 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Sep 30 15:00:41 crc kubenswrapper[4783]: I0930 15:00:41.935782 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_0e6375d6-1170-409e-950a-10784c1ebb3a/mariadb-client-5-default/0.log" Sep 30 15:00:41 crc kubenswrapper[4783]: I0930 15:00:41.964469 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"] Sep 30 15:00:41 crc kubenswrapper[4783]: I0930 15:00:41.966294 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js6d7\" (UniqueName: \"kubernetes.io/projected/0e6375d6-1170-409e-950a-10784c1ebb3a-kube-api-access-js6d7\") pod \"0e6375d6-1170-409e-950a-10784c1ebb3a\" (UID: \"0e6375d6-1170-409e-950a-10784c1ebb3a\") " Sep 30 15:00:41 crc kubenswrapper[4783]: I0930 15:00:41.971753 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e6375d6-1170-409e-950a-10784c1ebb3a-kube-api-access-js6d7" (OuterVolumeSpecName: "kube-api-access-js6d7") pod "0e6375d6-1170-409e-950a-10784c1ebb3a" (UID: "0e6375d6-1170-409e-950a-10784c1ebb3a"). InnerVolumeSpecName "kube-api-access-js6d7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:00:41 crc kubenswrapper[4783]: I0930 15:00:41.972378 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"] Sep 30 15:00:42 crc kubenswrapper[4783]: I0930 15:00:42.068007 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js6d7\" (UniqueName: \"kubernetes.io/projected/0e6375d6-1170-409e-950a-10784c1ebb3a-kube-api-access-js6d7\") on node \"crc\" DevicePath \"\"" Sep 30 15:00:42 crc kubenswrapper[4783]: I0930 15:00:42.095286 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"] Sep 30 15:00:42 crc kubenswrapper[4783]: E0930 15:00:42.095586 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e6375d6-1170-409e-950a-10784c1ebb3a" containerName="mariadb-client-5-default" Sep 30 15:00:42 crc kubenswrapper[4783]: I0930 15:00:42.095601 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e6375d6-1170-409e-950a-10784c1ebb3a" containerName="mariadb-client-5-default" Sep 30 15:00:42 crc kubenswrapper[4783]: I0930 15:00:42.095762 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e6375d6-1170-409e-950a-10784c1ebb3a" containerName="mariadb-client-5-default" Sep 30 15:00:42 crc kubenswrapper[4783]: I0930 15:00:42.096262 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Sep 30 15:00:42 crc kubenswrapper[4783]: I0930 15:00:42.102511 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Sep 30 15:00:42 crc kubenswrapper[4783]: I0930 15:00:42.169359 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v62bp\" (UniqueName: \"kubernetes.io/projected/ba805f2d-be6a-4061-9eeb-d6b65c1d9a05-kube-api-access-v62bp\") pod \"mariadb-client-6-default\" (UID: \"ba805f2d-be6a-4061-9eeb-d6b65c1d9a05\") " pod="openstack/mariadb-client-6-default" Sep 30 15:00:42 crc kubenswrapper[4783]: I0930 15:00:42.271410 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v62bp\" (UniqueName: \"kubernetes.io/projected/ba805f2d-be6a-4061-9eeb-d6b65c1d9a05-kube-api-access-v62bp\") pod \"mariadb-client-6-default\" (UID: \"ba805f2d-be6a-4061-9eeb-d6b65c1d9a05\") " pod="openstack/mariadb-client-6-default" Sep 30 15:00:42 crc kubenswrapper[4783]: I0930 15:00:42.292579 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v62bp\" (UniqueName: \"kubernetes.io/projected/ba805f2d-be6a-4061-9eeb-d6b65c1d9a05-kube-api-access-v62bp\") pod \"mariadb-client-6-default\" (UID: \"ba805f2d-be6a-4061-9eeb-d6b65c1d9a05\") " pod="openstack/mariadb-client-6-default" Sep 30 15:00:42 crc kubenswrapper[4783]: I0930 15:00:42.417247 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Sep 30 15:00:42 crc kubenswrapper[4783]: I0930 15:00:42.545053 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2932f49a2a788e60decc43cd9d99cabeefca9cc7eba680bc142089a3fe88ed4" Sep 30 15:00:42 crc kubenswrapper[4783]: I0930 15:00:42.545093 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Sep 30 15:00:42 crc kubenswrapper[4783]: I0930 15:00:42.853872 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e6375d6-1170-409e-950a-10784c1ebb3a" path="/var/lib/kubelet/pods/0e6375d6-1170-409e-950a-10784c1ebb3a/volumes" Sep 30 15:00:42 crc kubenswrapper[4783]: I0930 15:00:42.910194 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Sep 30 15:00:42 crc kubenswrapper[4783]: W0930 15:00:42.919173 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba805f2d_be6a_4061_9eeb_d6b65c1d9a05.slice/crio-32fbb08170186730aa0ac32a175b40b947c5a70133ae203b53529e21d50d4c80 WatchSource:0}: Error finding container 32fbb08170186730aa0ac32a175b40b947c5a70133ae203b53529e21d50d4c80: Status 404 returned error can't find the container with id 32fbb08170186730aa0ac32a175b40b947c5a70133ae203b53529e21d50d4c80 Sep 30 15:00:43 crc kubenswrapper[4783]: I0930 15:00:43.557883 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"ba805f2d-be6a-4061-9eeb-d6b65c1d9a05","Type":"ContainerStarted","Data":"8aaa93107d3437ed7447efad8197bb9e8a99539dc42ac48ee7aa93c781f9c4e4"} Sep 30 15:00:43 crc kubenswrapper[4783]: I0930 15:00:43.557983 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"ba805f2d-be6a-4061-9eeb-d6b65c1d9a05","Type":"ContainerStarted","Data":"32fbb08170186730aa0ac32a175b40b947c5a70133ae203b53529e21d50d4c80"} Sep 30 15:00:43 crc kubenswrapper[4783]: I0930 15:00:43.578616 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-6-default" podStartSLOduration=1.5785900050000001 podStartE2EDuration="1.578590005s" podCreationTimestamp="2025-09-30 15:00:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:00:43.577131389 +0000 UTC m=+5143.508597796" watchObservedRunningTime="2025-09-30 15:00:43.578590005 +0000 UTC m=+5143.510056322" Sep 30 15:00:43 crc kubenswrapper[4783]: I0930 15:00:43.689806 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-6-default_ba805f2d-be6a-4061-9eeb-d6b65c1d9a05/mariadb-client-6-default/0.log" Sep 30 15:00:44 crc kubenswrapper[4783]: I0930 15:00:44.570196 4783 generic.go:334] "Generic (PLEG): container finished" podID="ba805f2d-be6a-4061-9eeb-d6b65c1d9a05" containerID="8aaa93107d3437ed7447efad8197bb9e8a99539dc42ac48ee7aa93c781f9c4e4" exitCode=0 Sep 30 15:00:44 crc kubenswrapper[4783]: I0930 15:00:44.570307 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"ba805f2d-be6a-4061-9eeb-d6b65c1d9a05","Type":"ContainerDied","Data":"8aaa93107d3437ed7447efad8197bb9e8a99539dc42ac48ee7aa93c781f9c4e4"} Sep 30 15:00:45 crc kubenswrapper[4783]: I0930 15:00:45.965658 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.005999 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"] Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.010778 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"] Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.032755 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v62bp\" (UniqueName: \"kubernetes.io/projected/ba805f2d-be6a-4061-9eeb-d6b65c1d9a05-kube-api-access-v62bp\") pod \"ba805f2d-be6a-4061-9eeb-d6b65c1d9a05\" (UID: \"ba805f2d-be6a-4061-9eeb-d6b65c1d9a05\") " Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.039968 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba805f2d-be6a-4061-9eeb-d6b65c1d9a05-kube-api-access-v62bp" (OuterVolumeSpecName: "kube-api-access-v62bp") pod "ba805f2d-be6a-4061-9eeb-d6b65c1d9a05" (UID: "ba805f2d-be6a-4061-9eeb-d6b65c1d9a05"). InnerVolumeSpecName "kube-api-access-v62bp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.134891 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v62bp\" (UniqueName: \"kubernetes.io/projected/ba805f2d-be6a-4061-9eeb-d6b65c1d9a05-kube-api-access-v62bp\") on node \"crc\" DevicePath \"\"" Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.162295 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"] Sep 30 15:00:46 crc kubenswrapper[4783]: E0930 15:00:46.162778 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba805f2d-be6a-4061-9eeb-d6b65c1d9a05" containerName="mariadb-client-6-default" Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.162798 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba805f2d-be6a-4061-9eeb-d6b65c1d9a05" containerName="mariadb-client-6-default" Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.163077 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba805f2d-be6a-4061-9eeb-d6b65c1d9a05" containerName="mariadb-client-6-default" Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.163901 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.168818 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.235728 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w48fb\" (UniqueName: \"kubernetes.io/projected/dcbf2247-a300-40b1-a3ac-0744afd9413c-kube-api-access-w48fb\") pod \"mariadb-client-7-default\" (UID: \"dcbf2247-a300-40b1-a3ac-0744afd9413c\") " pod="openstack/mariadb-client-7-default" Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.337675 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w48fb\" (UniqueName: \"kubernetes.io/projected/dcbf2247-a300-40b1-a3ac-0744afd9413c-kube-api-access-w48fb\") pod \"mariadb-client-7-default\" (UID: \"dcbf2247-a300-40b1-a3ac-0744afd9413c\") " pod="openstack/mariadb-client-7-default" Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.353719 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w48fb\" (UniqueName: \"kubernetes.io/projected/dcbf2247-a300-40b1-a3ac-0744afd9413c-kube-api-access-w48fb\") pod \"mariadb-client-7-default\" (UID: \"dcbf2247-a300-40b1-a3ac-0744afd9413c\") " pod="openstack/mariadb-client-7-default" Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.500202 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.592524 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32fbb08170186730aa0ac32a175b40b947c5a70133ae203b53529e21d50d4c80" Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.592596 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Sep 30 15:00:46 crc kubenswrapper[4783]: I0930 15:00:46.855976 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba805f2d-be6a-4061-9eeb-d6b65c1d9a05" path="/var/lib/kubelet/pods/ba805f2d-be6a-4061-9eeb-d6b65c1d9a05/volumes" Sep 30 15:00:47 crc kubenswrapper[4783]: I0930 15:00:47.093620 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Sep 30 15:00:47 crc kubenswrapper[4783]: I0930 15:00:47.602279 4783 generic.go:334] "Generic (PLEG): container finished" podID="dcbf2247-a300-40b1-a3ac-0744afd9413c" containerID="07c04b2f4764660ea53a809cd72f8b9543b24c03bcdfec5241fc510a5db69a46" exitCode=0 Sep 30 15:00:47 crc kubenswrapper[4783]: I0930 15:00:47.602341 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"dcbf2247-a300-40b1-a3ac-0744afd9413c","Type":"ContainerDied","Data":"07c04b2f4764660ea53a809cd72f8b9543b24c03bcdfec5241fc510a5db69a46"} Sep 30 15:00:47 crc kubenswrapper[4783]: I0930 15:00:47.602382 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"dcbf2247-a300-40b1-a3ac-0744afd9413c","Type":"ContainerStarted","Data":"c05c58a9e7ddcf5ebd6a1123f9eebcde45c41b50943559fa7a4ac87dd5856856"} Sep 30 15:00:48 crc kubenswrapper[4783]: I0930 15:00:48.998204 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.016290 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_dcbf2247-a300-40b1-a3ac-0744afd9413c/mariadb-client-7-default/0.log" Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.038414 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"] Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.043005 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"] Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.104088 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w48fb\" (UniqueName: \"kubernetes.io/projected/dcbf2247-a300-40b1-a3ac-0744afd9413c-kube-api-access-w48fb\") pod \"dcbf2247-a300-40b1-a3ac-0744afd9413c\" (UID: \"dcbf2247-a300-40b1-a3ac-0744afd9413c\") " Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.109419 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcbf2247-a300-40b1-a3ac-0744afd9413c-kube-api-access-w48fb" (OuterVolumeSpecName: "kube-api-access-w48fb") pod "dcbf2247-a300-40b1-a3ac-0744afd9413c" (UID: "dcbf2247-a300-40b1-a3ac-0744afd9413c"). InnerVolumeSpecName "kube-api-access-w48fb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.177795 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"] Sep 30 15:00:49 crc kubenswrapper[4783]: E0930 15:00:49.178164 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcbf2247-a300-40b1-a3ac-0744afd9413c" containerName="mariadb-client-7-default" Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.178187 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcbf2247-a300-40b1-a3ac-0744afd9413c" containerName="mariadb-client-7-default" Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.178390 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcbf2247-a300-40b1-a3ac-0744afd9413c" containerName="mariadb-client-7-default" Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.178968 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2" Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.187734 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.207416 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w48fb\" (UniqueName: \"kubernetes.io/projected/dcbf2247-a300-40b1-a3ac-0744afd9413c-kube-api-access-w48fb\") on node \"crc\" DevicePath \"\"" Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.308453 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghf99\" (UniqueName: \"kubernetes.io/projected/330bc6ea-a503-4b9d-8065-639461d0fffa-kube-api-access-ghf99\") pod \"mariadb-client-2\" (UID: \"330bc6ea-a503-4b9d-8065-639461d0fffa\") " pod="openstack/mariadb-client-2" Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.409929 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghf99\" (UniqueName: \"kubernetes.io/projected/330bc6ea-a503-4b9d-8065-639461d0fffa-kube-api-access-ghf99\") pod \"mariadb-client-2\" (UID: \"330bc6ea-a503-4b9d-8065-639461d0fffa\") " pod="openstack/mariadb-client-2" Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.428533 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghf99\" (UniqueName: \"kubernetes.io/projected/330bc6ea-a503-4b9d-8065-639461d0fffa-kube-api-access-ghf99\") pod \"mariadb-client-2\" (UID: \"330bc6ea-a503-4b9d-8065-639461d0fffa\") " pod="openstack/mariadb-client-2" Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.508164 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.619966 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c05c58a9e7ddcf5ebd6a1123f9eebcde45c41b50943559fa7a4ac87dd5856856" Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.620036 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Sep 30 15:00:49 crc kubenswrapper[4783]: I0930 15:00:49.993937 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Sep 30 15:00:49 crc kubenswrapper[4783]: W0930 15:00:49.998587 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod330bc6ea_a503_4b9d_8065_639461d0fffa.slice/crio-ee1c0e5d8b4c18df87b311837badc9d5c097d63d7466ff1dedd2caabd194235f WatchSource:0}: Error finding container ee1c0e5d8b4c18df87b311837badc9d5c097d63d7466ff1dedd2caabd194235f: Status 404 returned error can't find the container with id ee1c0e5d8b4c18df87b311837badc9d5c097d63d7466ff1dedd2caabd194235f Sep 30 15:00:50 crc kubenswrapper[4783]: I0930 15:00:50.631730 4783 generic.go:334] "Generic (PLEG): container finished" podID="330bc6ea-a503-4b9d-8065-639461d0fffa" containerID="fe6e7736fc2b95310bd323ad89eb8c190828d40000212a84e0be4d11f12a4af4" exitCode=0 Sep 30 15:00:50 crc kubenswrapper[4783]: I0930 15:00:50.631838 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"330bc6ea-a503-4b9d-8065-639461d0fffa","Type":"ContainerDied","Data":"fe6e7736fc2b95310bd323ad89eb8c190828d40000212a84e0be4d11f12a4af4"} Sep 30 15:00:50 crc kubenswrapper[4783]: I0930 15:00:50.632253 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"330bc6ea-a503-4b9d-8065-639461d0fffa","Type":"ContainerStarted","Data":"ee1c0e5d8b4c18df87b311837badc9d5c097d63d7466ff1dedd2caabd194235f"} Sep 30 15:00:50 crc kubenswrapper[4783]: I0930 15:00:50.862301 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcbf2247-a300-40b1-a3ac-0744afd9413c" path="/var/lib/kubelet/pods/dcbf2247-a300-40b1-a3ac-0744afd9413c/volumes" Sep 30 15:00:51 crc kubenswrapper[4783]: I0930 15:00:51.844023 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:00:51 crc kubenswrapper[4783]: E0930 15:00:51.844598 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:00:52 crc kubenswrapper[4783]: I0930 15:00:52.003302 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2" Sep 30 15:00:52 crc kubenswrapper[4783]: I0930 15:00:52.020567 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_330bc6ea-a503-4b9d-8065-639461d0fffa/mariadb-client-2/0.log" Sep 30 15:00:52 crc kubenswrapper[4783]: I0930 15:00:52.046243 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"] Sep 30 15:00:52 crc kubenswrapper[4783]: I0930 15:00:52.051611 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"] Sep 30 15:00:52 crc kubenswrapper[4783]: I0930 15:00:52.163618 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghf99\" (UniqueName: \"kubernetes.io/projected/330bc6ea-a503-4b9d-8065-639461d0fffa-kube-api-access-ghf99\") pod \"330bc6ea-a503-4b9d-8065-639461d0fffa\" (UID: \"330bc6ea-a503-4b9d-8065-639461d0fffa\") " Sep 30 15:00:52 crc kubenswrapper[4783]: I0930 15:00:52.170508 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/330bc6ea-a503-4b9d-8065-639461d0fffa-kube-api-access-ghf99" (OuterVolumeSpecName: "kube-api-access-ghf99") pod "330bc6ea-a503-4b9d-8065-639461d0fffa" (UID: "330bc6ea-a503-4b9d-8065-639461d0fffa"). InnerVolumeSpecName "kube-api-access-ghf99". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:00:52 crc kubenswrapper[4783]: I0930 15:00:52.265194 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghf99\" (UniqueName: \"kubernetes.io/projected/330bc6ea-a503-4b9d-8065-639461d0fffa-kube-api-access-ghf99\") on node \"crc\" DevicePath \"\"" Sep 30 15:00:52 crc kubenswrapper[4783]: I0930 15:00:52.650834 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee1c0e5d8b4c18df87b311837badc9d5c097d63d7466ff1dedd2caabd194235f" Sep 30 15:00:52 crc kubenswrapper[4783]: I0930 15:00:52.659073 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2" Sep 30 15:00:52 crc kubenswrapper[4783]: I0930 15:00:52.854959 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="330bc6ea-a503-4b9d-8065-639461d0fffa" path="/var/lib/kubelet/pods/330bc6ea-a503-4b9d-8065-639461d0fffa/volumes" Sep 30 15:01:04 crc kubenswrapper[4783]: I0930 15:01:04.843901 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:01:04 crc kubenswrapper[4783]: E0930 15:01:04.844865 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:01:18 crc kubenswrapper[4783]: I0930 15:01:18.843375 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:01:18 crc kubenswrapper[4783]: E0930 15:01:18.844252 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:01:32 crc kubenswrapper[4783]: I0930 15:01:32.844196 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:01:32 crc kubenswrapper[4783]: E0930 15:01:32.845021 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.109853 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6c4kr"] Sep 30 15:01:41 crc kubenswrapper[4783]: E0930 15:01:41.111917 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="330bc6ea-a503-4b9d-8065-639461d0fffa" containerName="mariadb-client-2" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.112007 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="330bc6ea-a503-4b9d-8065-639461d0fffa" containerName="mariadb-client-2" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.112301 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="330bc6ea-a503-4b9d-8065-639461d0fffa" containerName="mariadb-client-2" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.113648 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.124542 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6c4kr"] Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.275392 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f802f3aa-fd65-4199-8666-06491949913e-utilities\") pod \"certified-operators-6c4kr\" (UID: \"f802f3aa-fd65-4199-8666-06491949913e\") " pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.275482 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f802f3aa-fd65-4199-8666-06491949913e-catalog-content\") pod \"certified-operators-6c4kr\" (UID: \"f802f3aa-fd65-4199-8666-06491949913e\") " pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.275517 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lwmb\" (UniqueName: \"kubernetes.io/projected/f802f3aa-fd65-4199-8666-06491949913e-kube-api-access-8lwmb\") pod \"certified-operators-6c4kr\" (UID: \"f802f3aa-fd65-4199-8666-06491949913e\") " pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.377632 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f802f3aa-fd65-4199-8666-06491949913e-catalog-content\") pod \"certified-operators-6c4kr\" (UID: \"f802f3aa-fd65-4199-8666-06491949913e\") " pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.377702 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lwmb\" (UniqueName: \"kubernetes.io/projected/f802f3aa-fd65-4199-8666-06491949913e-kube-api-access-8lwmb\") pod \"certified-operators-6c4kr\" (UID: \"f802f3aa-fd65-4199-8666-06491949913e\") " pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.377798 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f802f3aa-fd65-4199-8666-06491949913e-utilities\") pod \"certified-operators-6c4kr\" (UID: \"f802f3aa-fd65-4199-8666-06491949913e\") " pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.378250 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f802f3aa-fd65-4199-8666-06491949913e-catalog-content\") pod \"certified-operators-6c4kr\" (UID: \"f802f3aa-fd65-4199-8666-06491949913e\") " pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.378405 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f802f3aa-fd65-4199-8666-06491949913e-utilities\") pod \"certified-operators-6c4kr\" (UID: \"f802f3aa-fd65-4199-8666-06491949913e\") " pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.398343 4783 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8lwmb\" (UniqueName: \"kubernetes.io/projected/f802f3aa-fd65-4199-8666-06491949913e-kube-api-access-8lwmb\") pod \"certified-operators-6c4kr\" (UID: \"f802f3aa-fd65-4199-8666-06491949913e\") " pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.439296 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:41 crc kubenswrapper[4783]: I0930 15:01:41.894518 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6c4kr"] Sep 30 15:01:42 crc kubenswrapper[4783]: I0930 15:01:42.081852 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6c4kr" event={"ID":"f802f3aa-fd65-4199-8666-06491949913e","Type":"ContainerStarted","Data":"4f9366d7c630b76f52a9c21ab1cb920c1502dd1522fdb09b3be0b5a6789cc9ae"} Sep 30 15:01:43 crc kubenswrapper[4783]: I0930 15:01:43.092142 4783 generic.go:334] "Generic (PLEG): container finished" podID="f802f3aa-fd65-4199-8666-06491949913e" containerID="b8ec195dc0cf9b9d04423e65b6517494b980e38c3de223926879686650ac7c98" exitCode=0 Sep 30 15:01:43 crc kubenswrapper[4783]: I0930 15:01:43.092328 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6c4kr" event={"ID":"f802f3aa-fd65-4199-8666-06491949913e","Type":"ContainerDied","Data":"b8ec195dc0cf9b9d04423e65b6517494b980e38c3de223926879686650ac7c98"} Sep 30 15:01:45 crc kubenswrapper[4783]: I0930 15:01:45.110945 4783 generic.go:334] "Generic (PLEG): container finished" podID="f802f3aa-fd65-4199-8666-06491949913e" containerID="72a205011a9848876d9ab0ead87945d9658c7f212602ab5572f3eb45fbc4ca3e" exitCode=0 Sep 30 15:01:45 crc kubenswrapper[4783]: I0930 15:01:45.111030 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6c4kr" event={"ID":"f802f3aa-fd65-4199-8666-06491949913e","Type":"ContainerDied","Data":"72a205011a9848876d9ab0ead87945d9658c7f212602ab5572f3eb45fbc4ca3e"} Sep 30 15:01:45 crc kubenswrapper[4783]: I0930 15:01:45.843548 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:01:45 crc kubenswrapper[4783]: E0930 15:01:45.844326 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:01:46 crc kubenswrapper[4783]: I0930 15:01:46.121025 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6c4kr" event={"ID":"f802f3aa-fd65-4199-8666-06491949913e","Type":"ContainerStarted","Data":"29a809dacf64144f7090bc3ee2b9b65128aa4630517b3dcf9bb888907180d23b"} Sep 30 15:01:46 crc kubenswrapper[4783]: I0930 15:01:46.146850 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6c4kr" podStartSLOduration=2.474158972 podStartE2EDuration="5.146828102s" podCreationTimestamp="2025-09-30 15:01:41 +0000 UTC" firstStartedPulling="2025-09-30 15:01:43.094077423 +0000 UTC m=+5203.025543730" 
lastFinishedPulling="2025-09-30 15:01:45.766746553 +0000 UTC m=+5205.698212860" observedRunningTime="2025-09-30 15:01:46.140472278 +0000 UTC m=+5206.071938585" watchObservedRunningTime="2025-09-30 15:01:46.146828102 +0000 UTC m=+5206.078294419" Sep 30 15:01:51 crc kubenswrapper[4783]: I0930 15:01:51.439448 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:51 crc kubenswrapper[4783]: I0930 15:01:51.439923 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:51 crc kubenswrapper[4783]: I0930 15:01:51.486630 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:52 crc kubenswrapper[4783]: I0930 15:01:52.226691 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:52 crc kubenswrapper[4783]: I0930 15:01:52.271874 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6c4kr"] Sep 30 15:01:54 crc kubenswrapper[4783]: I0930 15:01:54.190873 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6c4kr" podUID="f802f3aa-fd65-4199-8666-06491949913e" containerName="registry-server" containerID="cri-o://29a809dacf64144f7090bc3ee2b9b65128aa4630517b3dcf9bb888907180d23b" gracePeriod=2 Sep 30 15:01:54 crc kubenswrapper[4783]: I0930 15:01:54.590175 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:54 crc kubenswrapper[4783]: I0930 15:01:54.785914 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f802f3aa-fd65-4199-8666-06491949913e-utilities\") pod \"f802f3aa-fd65-4199-8666-06491949913e\" (UID: \"f802f3aa-fd65-4199-8666-06491949913e\") " Sep 30 15:01:54 crc kubenswrapper[4783]: I0930 15:01:54.785966 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lwmb\" (UniqueName: \"kubernetes.io/projected/f802f3aa-fd65-4199-8666-06491949913e-kube-api-access-8lwmb\") pod \"f802f3aa-fd65-4199-8666-06491949913e\" (UID: \"f802f3aa-fd65-4199-8666-06491949913e\") " Sep 30 15:01:54 crc kubenswrapper[4783]: I0930 15:01:54.786053 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f802f3aa-fd65-4199-8666-06491949913e-catalog-content\") pod \"f802f3aa-fd65-4199-8666-06491949913e\" (UID: \"f802f3aa-fd65-4199-8666-06491949913e\") " Sep 30 15:01:54 crc kubenswrapper[4783]: I0930 15:01:54.787128 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f802f3aa-fd65-4199-8666-06491949913e-utilities" (OuterVolumeSpecName: "utilities") pod "f802f3aa-fd65-4199-8666-06491949913e" (UID: "f802f3aa-fd65-4199-8666-06491949913e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 15:01:54 crc kubenswrapper[4783]: I0930 15:01:54.792113 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f802f3aa-fd65-4199-8666-06491949913e-kube-api-access-8lwmb" (OuterVolumeSpecName: "kube-api-access-8lwmb") pod "f802f3aa-fd65-4199-8666-06491949913e" (UID: "f802f3aa-fd65-4199-8666-06491949913e"). InnerVolumeSpecName "kube-api-access-8lwmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:01:54 crc kubenswrapper[4783]: I0930 15:01:54.840716 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f802f3aa-fd65-4199-8666-06491949913e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f802f3aa-fd65-4199-8666-06491949913e" (UID: "f802f3aa-fd65-4199-8666-06491949913e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 15:01:54 crc kubenswrapper[4783]: I0930 15:01:54.887820 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f802f3aa-fd65-4199-8666-06491949913e-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 15:01:54 crc kubenswrapper[4783]: I0930 15:01:54.887876 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lwmb\" (UniqueName: \"kubernetes.io/projected/f802f3aa-fd65-4199-8666-06491949913e-kube-api-access-8lwmb\") on node \"crc\" DevicePath \"\"" Sep 30 15:01:54 crc kubenswrapper[4783]: I0930 15:01:54.887892 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f802f3aa-fd65-4199-8666-06491949913e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.204095 4783 generic.go:334] "Generic (PLEG): container finished" podID="f802f3aa-fd65-4199-8666-06491949913e" containerID="29a809dacf64144f7090bc3ee2b9b65128aa4630517b3dcf9bb888907180d23b" exitCode=0 Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.204142 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6c4kr" event={"ID":"f802f3aa-fd65-4199-8666-06491949913e","Type":"ContainerDied","Data":"29a809dacf64144f7090bc3ee2b9b65128aa4630517b3dcf9bb888907180d23b"} Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.204168 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6c4kr" event={"ID":"f802f3aa-fd65-4199-8666-06491949913e","Type":"ContainerDied","Data":"4f9366d7c630b76f52a9c21ab1cb920c1502dd1522fdb09b3be0b5a6789cc9ae"} Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.204188 4783 scope.go:117] "RemoveContainer" containerID="29a809dacf64144f7090bc3ee2b9b65128aa4630517b3dcf9bb888907180d23b" Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.204207 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6c4kr" Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.237291 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6c4kr"] Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.240337 4783 scope.go:117] "RemoveContainer" containerID="72a205011a9848876d9ab0ead87945d9658c7f212602ab5572f3eb45fbc4ca3e" Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.246409 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6c4kr"] Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.262469 4783 scope.go:117] "RemoveContainer" containerID="b8ec195dc0cf9b9d04423e65b6517494b980e38c3de223926879686650ac7c98" Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.291303 4783 scope.go:117] "RemoveContainer" containerID="29a809dacf64144f7090bc3ee2b9b65128aa4630517b3dcf9bb888907180d23b" Sep 30 15:01:55 crc kubenswrapper[4783]: E0930 15:01:55.291986 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29a809dacf64144f7090bc3ee2b9b65128aa4630517b3dcf9bb888907180d23b\": container with ID starting with 29a809dacf64144f7090bc3ee2b9b65128aa4630517b3dcf9bb888907180d23b not found: ID does not exist" containerID="29a809dacf64144f7090bc3ee2b9b65128aa4630517b3dcf9bb888907180d23b" Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.292032 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29a809dacf64144f7090bc3ee2b9b65128aa4630517b3dcf9bb888907180d23b"} err="failed to get container status \"29a809dacf64144f7090bc3ee2b9b65128aa4630517b3dcf9bb888907180d23b\": rpc error: code = NotFound desc = could not find container \"29a809dacf64144f7090bc3ee2b9b65128aa4630517b3dcf9bb888907180d23b\": container with ID starting with 29a809dacf64144f7090bc3ee2b9b65128aa4630517b3dcf9bb888907180d23b not found: ID does not exist" Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.292065 4783 scope.go:117] "RemoveContainer" containerID="72a205011a9848876d9ab0ead87945d9658c7f212602ab5572f3eb45fbc4ca3e" Sep 30 15:01:55 crc kubenswrapper[4783]: E0930 15:01:55.292431 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72a205011a9848876d9ab0ead87945d9658c7f212602ab5572f3eb45fbc4ca3e\": container with ID starting with 72a205011a9848876d9ab0ead87945d9658c7f212602ab5572f3eb45fbc4ca3e not found: ID does not exist" containerID="72a205011a9848876d9ab0ead87945d9658c7f212602ab5572f3eb45fbc4ca3e" Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.292455 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72a205011a9848876d9ab0ead87945d9658c7f212602ab5572f3eb45fbc4ca3e"} err="failed to get container status \"72a205011a9848876d9ab0ead87945d9658c7f212602ab5572f3eb45fbc4ca3e\": rpc error: code = NotFound desc = could not find container \"72a205011a9848876d9ab0ead87945d9658c7f212602ab5572f3eb45fbc4ca3e\": container with ID starting with 72a205011a9848876d9ab0ead87945d9658c7f212602ab5572f3eb45fbc4ca3e not found: ID does not exist" Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.292472 4783 scope.go:117] "RemoveContainer" containerID="b8ec195dc0cf9b9d04423e65b6517494b980e38c3de223926879686650ac7c98" Sep 30 15:01:55 crc kubenswrapper[4783]: E0930 15:01:55.292779 4783 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b8ec195dc0cf9b9d04423e65b6517494b980e38c3de223926879686650ac7c98\": container with ID starting with b8ec195dc0cf9b9d04423e65b6517494b980e38c3de223926879686650ac7c98 not found: ID does not exist" containerID="b8ec195dc0cf9b9d04423e65b6517494b980e38c3de223926879686650ac7c98" Sep 30 15:01:55 crc kubenswrapper[4783]: I0930 15:01:55.292804 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8ec195dc0cf9b9d04423e65b6517494b980e38c3de223926879686650ac7c98"} err="failed to get container status \"b8ec195dc0cf9b9d04423e65b6517494b980e38c3de223926879686650ac7c98\": rpc error: code = NotFound desc = could not find container \"b8ec195dc0cf9b9d04423e65b6517494b980e38c3de223926879686650ac7c98\": container with ID starting with b8ec195dc0cf9b9d04423e65b6517494b980e38c3de223926879686650ac7c98 not found: ID does not exist" Sep 30 15:01:56 crc kubenswrapper[4783]: I0930 15:01:56.854339 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f802f3aa-fd65-4199-8666-06491949913e" path="/var/lib/kubelet/pods/f802f3aa-fd65-4199-8666-06491949913e/volumes" Sep 30 15:01:59 crc kubenswrapper[4783]: I0930 15:01:59.843575 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:01:59 crc kubenswrapper[4783]: E0930 15:01:59.844159 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:02:10 crc kubenswrapper[4783]: I0930 15:02:10.843864 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:02:10 crc kubenswrapper[4783]: E0930 15:02:10.844513 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:02:18 crc kubenswrapper[4783]: I0930 15:02:18.961501 4783 scope.go:117] "RemoveContainer" containerID="0a26d4d43a48344ce605a8b6a0205ac4f4e3076546e09740b36f8eb6cb764505" Sep 30 15:02:21 crc kubenswrapper[4783]: I0930 15:02:21.843669 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:02:21 crc kubenswrapper[4783]: E0930 15:02:21.844176 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:02:32 crc kubenswrapper[4783]: I0930 15:02:32.843998 4783 scope.go:117] "RemoveContainer" 
containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:02:32 crc kubenswrapper[4783]: E0930 15:02:32.845042 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:02:43 crc kubenswrapper[4783]: I0930 15:02:43.843433 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:02:43 crc kubenswrapper[4783]: E0930 15:02:43.844049 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:02:58 crc kubenswrapper[4783]: I0930 15:02:58.843517 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:02:58 crc kubenswrapper[4783]: E0930 15:02:58.844185 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:03:10 crc kubenswrapper[4783]: I0930 15:03:10.852369 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:03:10 crc kubenswrapper[4783]: E0930 15:03:10.853175 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:03:24 crc kubenswrapper[4783]: I0930 15:03:24.843803 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:03:24 crc kubenswrapper[4783]: E0930 15:03:24.844695 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.556960 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4dhc4"] Sep 30 15:03:37 crc kubenswrapper[4783]: E0930 15:03:37.558162 4783 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f802f3aa-fd65-4199-8666-06491949913e" containerName="registry-server" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.558182 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="f802f3aa-fd65-4199-8666-06491949913e" containerName="registry-server" Sep 30 15:03:37 crc kubenswrapper[4783]: E0930 15:03:37.558210 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f802f3aa-fd65-4199-8666-06491949913e" containerName="extract-utilities" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.558218 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="f802f3aa-fd65-4199-8666-06491949913e" containerName="extract-utilities" Sep 30 15:03:37 crc kubenswrapper[4783]: E0930 15:03:37.558306 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f802f3aa-fd65-4199-8666-06491949913e" containerName="extract-content" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.558316 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="f802f3aa-fd65-4199-8666-06491949913e" containerName="extract-content" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.558571 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="f802f3aa-fd65-4199-8666-06491949913e" containerName="registry-server" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.559933 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.577104 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4dhc4"] Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.748191 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aea14e06-b6f6-4f1b-88ca-5034f159869b-catalog-content\") pod \"redhat-operators-4dhc4\" (UID: \"aea14e06-b6f6-4f1b-88ca-5034f159869b\") " pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.748494 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xpvp\" (UniqueName: \"kubernetes.io/projected/aea14e06-b6f6-4f1b-88ca-5034f159869b-kube-api-access-7xpvp\") pod \"redhat-operators-4dhc4\" (UID: \"aea14e06-b6f6-4f1b-88ca-5034f159869b\") " pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.748665 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aea14e06-b6f6-4f1b-88ca-5034f159869b-utilities\") pod \"redhat-operators-4dhc4\" (UID: \"aea14e06-b6f6-4f1b-88ca-5034f159869b\") " pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.849697 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aea14e06-b6f6-4f1b-88ca-5034f159869b-catalog-content\") pod \"redhat-operators-4dhc4\" (UID: \"aea14e06-b6f6-4f1b-88ca-5034f159869b\") " pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.850024 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xpvp\" (UniqueName: \"kubernetes.io/projected/aea14e06-b6f6-4f1b-88ca-5034f159869b-kube-api-access-7xpvp\") pod \"redhat-operators-4dhc4\" 
(UID: \"aea14e06-b6f6-4f1b-88ca-5034f159869b\") " pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.850191 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aea14e06-b6f6-4f1b-88ca-5034f159869b-utilities\") pod \"redhat-operators-4dhc4\" (UID: \"aea14e06-b6f6-4f1b-88ca-5034f159869b\") " pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.850751 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aea14e06-b6f6-4f1b-88ca-5034f159869b-catalog-content\") pod \"redhat-operators-4dhc4\" (UID: \"aea14e06-b6f6-4f1b-88ca-5034f159869b\") " pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.850957 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aea14e06-b6f6-4f1b-88ca-5034f159869b-utilities\") pod \"redhat-operators-4dhc4\" (UID: \"aea14e06-b6f6-4f1b-88ca-5034f159869b\") " pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.869647 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xpvp\" (UniqueName: \"kubernetes.io/projected/aea14e06-b6f6-4f1b-88ca-5034f159869b-kube-api-access-7xpvp\") pod \"redhat-operators-4dhc4\" (UID: \"aea14e06-b6f6-4f1b-88ca-5034f159869b\") " pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:37 crc kubenswrapper[4783]: I0930 15:03:37.905256 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:38 crc kubenswrapper[4783]: I0930 15:03:38.352304 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4dhc4"] Sep 30 15:03:39 crc kubenswrapper[4783]: I0930 15:03:39.112593 4783 generic.go:334] "Generic (PLEG): container finished" podID="aea14e06-b6f6-4f1b-88ca-5034f159869b" containerID="2bc5f156d33ca8dce9d9e283d0dc4fab4816e3e1a89d9581c236c7a01f8eca3a" exitCode=0 Sep 30 15:03:39 crc kubenswrapper[4783]: I0930 15:03:39.112785 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4dhc4" event={"ID":"aea14e06-b6f6-4f1b-88ca-5034f159869b","Type":"ContainerDied","Data":"2bc5f156d33ca8dce9d9e283d0dc4fab4816e3e1a89d9581c236c7a01f8eca3a"} Sep 30 15:03:39 crc kubenswrapper[4783]: I0930 15:03:39.112956 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4dhc4" event={"ID":"aea14e06-b6f6-4f1b-88ca-5034f159869b","Type":"ContainerStarted","Data":"63eab9abde3b7becb88469723ec4486b4b9a12fc3b4d840bbccc589083e80cb5"} Sep 30 15:03:39 crc kubenswrapper[4783]: I0930 15:03:39.843156 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:03:39 crc kubenswrapper[4783]: E0930 15:03:39.843708 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" 
podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:03:41 crc kubenswrapper[4783]: I0930 15:03:41.128372 4783 generic.go:334] "Generic (PLEG): container finished" podID="aea14e06-b6f6-4f1b-88ca-5034f159869b" containerID="c1844c78391653f73eb49b85e468547e3eb8401b86358ab4fd969a2fc9dee42d" exitCode=0 Sep 30 15:03:41 crc kubenswrapper[4783]: I0930 15:03:41.128774 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4dhc4" event={"ID":"aea14e06-b6f6-4f1b-88ca-5034f159869b","Type":"ContainerDied","Data":"c1844c78391653f73eb49b85e468547e3eb8401b86358ab4fd969a2fc9dee42d"} Sep 30 15:03:42 crc kubenswrapper[4783]: I0930 15:03:42.139551 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4dhc4" event={"ID":"aea14e06-b6f6-4f1b-88ca-5034f159869b","Type":"ContainerStarted","Data":"6e3bd79b46121573dda8f7eef4dc3301f9efc5adc9a94c1f4a0257879c87df54"} Sep 30 15:03:42 crc kubenswrapper[4783]: I0930 15:03:42.157524 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4dhc4" podStartSLOduration=2.638301716 podStartE2EDuration="5.157485872s" podCreationTimestamp="2025-09-30 15:03:37 +0000 UTC" firstStartedPulling="2025-09-30 15:03:39.118691341 +0000 UTC m=+5319.050157648" lastFinishedPulling="2025-09-30 15:03:41.637875497 +0000 UTC m=+5321.569341804" observedRunningTime="2025-09-30 15:03:42.153379331 +0000 UTC m=+5322.084845658" watchObservedRunningTime="2025-09-30 15:03:42.157485872 +0000 UTC m=+5322.088952189" Sep 30 15:03:47 crc kubenswrapper[4783]: I0930 15:03:47.905666 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:47 crc kubenswrapper[4783]: I0930 15:03:47.906269 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:47 crc kubenswrapper[4783]: I0930 15:03:47.950014 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:48 crc kubenswrapper[4783]: I0930 15:03:48.226989 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4dhc4" Sep 30 15:03:48 crc kubenswrapper[4783]: I0930 15:03:48.269076 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4dhc4"] Sep 30 15:03:50 crc kubenswrapper[4783]: I0930 15:03:50.214538 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4dhc4" podUID="aea14e06-b6f6-4f1b-88ca-5034f159869b" containerName="registry-server" containerID="cri-o://6e3bd79b46121573dda8f7eef4dc3301f9efc5adc9a94c1f4a0257879c87df54" gracePeriod=2 Sep 30 15:03:50 crc kubenswrapper[4783]: I0930 15:03:50.848819 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:03:50 crc kubenswrapper[4783]: E0930 15:03:50.849257 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:03:51 crc 
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.174022 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4dhc4"
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.224765 4783 generic.go:334] "Generic (PLEG): container finished" podID="aea14e06-b6f6-4f1b-88ca-5034f159869b" containerID="6e3bd79b46121573dda8f7eef4dc3301f9efc5adc9a94c1f4a0257879c87df54" exitCode=0
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.224821 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4dhc4"
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.224820 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4dhc4" event={"ID":"aea14e06-b6f6-4f1b-88ca-5034f159869b","Type":"ContainerDied","Data":"6e3bd79b46121573dda8f7eef4dc3301f9efc5adc9a94c1f4a0257879c87df54"}
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.224951 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4dhc4" event={"ID":"aea14e06-b6f6-4f1b-88ca-5034f159869b","Type":"ContainerDied","Data":"63eab9abde3b7becb88469723ec4486b4b9a12fc3b4d840bbccc589083e80cb5"}
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.224973 4783 scope.go:117] "RemoveContainer" containerID="6e3bd79b46121573dda8f7eef4dc3301f9efc5adc9a94c1f4a0257879c87df54"
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.246557 4783 scope.go:117] "RemoveContainer" containerID="c1844c78391653f73eb49b85e468547e3eb8401b86358ab4fd969a2fc9dee42d"
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.261490 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xpvp\" (UniqueName: \"kubernetes.io/projected/aea14e06-b6f6-4f1b-88ca-5034f159869b-kube-api-access-7xpvp\") pod \"aea14e06-b6f6-4f1b-88ca-5034f159869b\" (UID: \"aea14e06-b6f6-4f1b-88ca-5034f159869b\") "
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.261556 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aea14e06-b6f6-4f1b-88ca-5034f159869b-catalog-content\") pod \"aea14e06-b6f6-4f1b-88ca-5034f159869b\" (UID: \"aea14e06-b6f6-4f1b-88ca-5034f159869b\") "
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.261594 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aea14e06-b6f6-4f1b-88ca-5034f159869b-utilities\") pod \"aea14e06-b6f6-4f1b-88ca-5034f159869b\" (UID: \"aea14e06-b6f6-4f1b-88ca-5034f159869b\") "
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.266093 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aea14e06-b6f6-4f1b-88ca-5034f159869b-utilities" (OuterVolumeSpecName: "utilities") pod "aea14e06-b6f6-4f1b-88ca-5034f159869b" (UID: "aea14e06-b6f6-4f1b-88ca-5034f159869b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.269049 4783 scope.go:117] "RemoveContainer" containerID="2bc5f156d33ca8dce9d9e283d0dc4fab4816e3e1a89d9581c236c7a01f8eca3a"
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.269159 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aea14e06-b6f6-4f1b-88ca-5034f159869b-kube-api-access-7xpvp" (OuterVolumeSpecName: "kube-api-access-7xpvp") pod "aea14e06-b6f6-4f1b-88ca-5034f159869b" (UID: "aea14e06-b6f6-4f1b-88ca-5034f159869b"). InnerVolumeSpecName "kube-api-access-7xpvp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.323735 4783 scope.go:117] "RemoveContainer" containerID="6e3bd79b46121573dda8f7eef4dc3301f9efc5adc9a94c1f4a0257879c87df54"
Sep 30 15:03:51 crc kubenswrapper[4783]: E0930 15:03:51.324297 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e3bd79b46121573dda8f7eef4dc3301f9efc5adc9a94c1f4a0257879c87df54\": container with ID starting with 6e3bd79b46121573dda8f7eef4dc3301f9efc5adc9a94c1f4a0257879c87df54 not found: ID does not exist" containerID="6e3bd79b46121573dda8f7eef4dc3301f9efc5adc9a94c1f4a0257879c87df54"
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.324392 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e3bd79b46121573dda8f7eef4dc3301f9efc5adc9a94c1f4a0257879c87df54"} err="failed to get container status \"6e3bd79b46121573dda8f7eef4dc3301f9efc5adc9a94c1f4a0257879c87df54\": rpc error: code = NotFound desc = could not find container \"6e3bd79b46121573dda8f7eef4dc3301f9efc5adc9a94c1f4a0257879c87df54\": container with ID starting with 6e3bd79b46121573dda8f7eef4dc3301f9efc5adc9a94c1f4a0257879c87df54 not found: ID does not exist"
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.324463 4783 scope.go:117] "RemoveContainer" containerID="c1844c78391653f73eb49b85e468547e3eb8401b86358ab4fd969a2fc9dee42d"
Sep 30 15:03:51 crc kubenswrapper[4783]: E0930 15:03:51.324949 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1844c78391653f73eb49b85e468547e3eb8401b86358ab4fd969a2fc9dee42d\": container with ID starting with c1844c78391653f73eb49b85e468547e3eb8401b86358ab4fd969a2fc9dee42d not found: ID does not exist" containerID="c1844c78391653f73eb49b85e468547e3eb8401b86358ab4fd969a2fc9dee42d"
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.324993 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1844c78391653f73eb49b85e468547e3eb8401b86358ab4fd969a2fc9dee42d"} err="failed to get container status \"c1844c78391653f73eb49b85e468547e3eb8401b86358ab4fd969a2fc9dee42d\": rpc error: code = NotFound desc = could not find container \"c1844c78391653f73eb49b85e468547e3eb8401b86358ab4fd969a2fc9dee42d\": container with ID starting with c1844c78391653f73eb49b85e468547e3eb8401b86358ab4fd969a2fc9dee42d not found: ID does not exist"
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.325021 4783 scope.go:117] "RemoveContainer" containerID="2bc5f156d33ca8dce9d9e283d0dc4fab4816e3e1a89d9581c236c7a01f8eca3a"
Sep 30 15:03:51 crc kubenswrapper[4783]: E0930 15:03:51.325521 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bc5f156d33ca8dce9d9e283d0dc4fab4816e3e1a89d9581c236c7a01f8eca3a\": container with ID starting with 2bc5f156d33ca8dce9d9e283d0dc4fab4816e3e1a89d9581c236c7a01f8eca3a not found: ID does not exist" containerID="2bc5f156d33ca8dce9d9e283d0dc4fab4816e3e1a89d9581c236c7a01f8eca3a"
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.325608 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bc5f156d33ca8dce9d9e283d0dc4fab4816e3e1a89d9581c236c7a01f8eca3a"} err="failed to get container status \"2bc5f156d33ca8dce9d9e283d0dc4fab4816e3e1a89d9581c236c7a01f8eca3a\": rpc error: code = NotFound desc = could not find container \"2bc5f156d33ca8dce9d9e283d0dc4fab4816e3e1a89d9581c236c7a01f8eca3a\": container with ID starting with 2bc5f156d33ca8dce9d9e283d0dc4fab4816e3e1a89d9581c236c7a01f8eca3a not found: ID does not exist"
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.363275 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xpvp\" (UniqueName: \"kubernetes.io/projected/aea14e06-b6f6-4f1b-88ca-5034f159869b-kube-api-access-7xpvp\") on node \"crc\" DevicePath \"\""
Sep 30 15:03:51 crc kubenswrapper[4783]: I0930 15:03:51.363328 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aea14e06-b6f6-4f1b-88ca-5034f159869b-utilities\") on node \"crc\" DevicePath \"\""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 15:03:52 crc kubenswrapper[4783]: I0930 15:03:52.683802 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aea14e06-b6f6-4f1b-88ca-5034f159869b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 15:03:52 crc kubenswrapper[4783]: I0930 15:03:52.753071 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4dhc4"] Sep 30 15:03:52 crc kubenswrapper[4783]: I0930 15:03:52.760472 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4dhc4"] Sep 30 15:03:52 crc kubenswrapper[4783]: I0930 15:03:52.855815 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aea14e06-b6f6-4f1b-88ca-5034f159869b" path="/var/lib/kubelet/pods/aea14e06-b6f6-4f1b-88ca-5034f159869b/volumes" Sep 30 15:04:01 crc kubenswrapper[4783]: I0930 15:04:01.843259 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:04:01 crc kubenswrapper[4783]: E0930 15:04:01.844115 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:04:16 crc kubenswrapper[4783]: I0930 15:04:16.843615 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:04:16 crc kubenswrapper[4783]: E0930 15:04:16.844447 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:04:28 crc kubenswrapper[4783]: I0930 15:04:28.842966 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:04:28 crc kubenswrapper[4783]: E0930 15:04:28.843747 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.417953 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Sep 30 15:04:41 crc kubenswrapper[4783]: E0930 15:04:41.418877 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aea14e06-b6f6-4f1b-88ca-5034f159869b" containerName="extract-utilities" Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.418892 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="aea14e06-b6f6-4f1b-88ca-5034f159869b" containerName="extract-utilities" Sep 30 15:04:41 crc kubenswrapper[4783]: E0930 15:04:41.418913 4783 
Sep 30 15:04:41 crc kubenswrapper[4783]: E0930 15:04:41.418913 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aea14e06-b6f6-4f1b-88ca-5034f159869b" containerName="registry-server"
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.418920 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="aea14e06-b6f6-4f1b-88ca-5034f159869b" containerName="registry-server"
Sep 30 15:04:41 crc kubenswrapper[4783]: E0930 15:04:41.418949 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aea14e06-b6f6-4f1b-88ca-5034f159869b" containerName="extract-content"
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.418958 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="aea14e06-b6f6-4f1b-88ca-5034f159869b" containerName="extract-content"
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.419139 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="aea14e06-b6f6-4f1b-88ca-5034f159869b" containerName="registry-server"
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.419692 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.421639 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-k622k"
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.442264 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"]
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.568677 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwmgp\" (UniqueName: \"kubernetes.io/projected/75ce7228-295e-4058-bb02-2750ffd56b4e-kube-api-access-pwmgp\") pod \"mariadb-copy-data\" (UID: \"75ce7228-295e-4058-bb02-2750ffd56b4e\") " pod="openstack/mariadb-copy-data"
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.568755 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c37688de-109d-4628-8aaa-39d0760b3ffd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c37688de-109d-4628-8aaa-39d0760b3ffd\") pod \"mariadb-copy-data\" (UID: \"75ce7228-295e-4058-bb02-2750ffd56b4e\") " pod="openstack/mariadb-copy-data"
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.671208 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwmgp\" (UniqueName: \"kubernetes.io/projected/75ce7228-295e-4058-bb02-2750ffd56b4e-kube-api-access-pwmgp\") pod \"mariadb-copy-data\" (UID: \"75ce7228-295e-4058-bb02-2750ffd56b4e\") " pod="openstack/mariadb-copy-data"
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.671738 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c37688de-109d-4628-8aaa-39d0760b3ffd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c37688de-109d-4628-8aaa-39d0760b3ffd\") pod \"mariadb-copy-data\" (UID: \"75ce7228-295e-4058-bb02-2750ffd56b4e\") " pod="openstack/mariadb-copy-data"
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.676576 4783 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
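
The csi_attacher line that closes the block above is CSI capability negotiation at work: a driver that does not advertise STAGE_UNSTAGE_VOLUME has no NodeStageVolume step, so MountDevice degrades to bookkeeping, and the "MountVolume.MountDevice succeeded" entry that follows merely records the global mount path. A rough sketch of that branch (the function names are illustrative, not the kubelet's own):

def node_stage_volume(volume_id: str, staging_path: str) -> None:
    print(f"NodeStageVolume({volume_id}) -> {staging_path}")  # stand-in for the CSI RPC

def mount_device(driver_caps: set, volume_id: str, staging_path: str) -> str:
    """Global (per-node) mount step; per-pod SetUp happens afterwards."""
    if "STAGE_UNSTAGE_VOLUME" not in driver_caps:
        # Nothing to stage: skip the NodeStageVolume RPC entirely and
        # let NodePublishVolume (MountVolume.SetUp) do the real work later.
        return staging_path
    node_stage_volume(volume_id, staging_path)
    return staging_path
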
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.676792 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c37688de-109d-4628-8aaa-39d0760b3ffd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c37688de-109d-4628-8aaa-39d0760b3ffd\") pod \"mariadb-copy-data\" (UID: \"75ce7228-295e-4058-bb02-2750ffd56b4e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b7a809498505c02ab58d758357a517d035e8432f6b19560ed1e6ea57ae92a159/globalmount\"" pod="openstack/mariadb-copy-data"
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.698899 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwmgp\" (UniqueName: \"kubernetes.io/projected/75ce7228-295e-4058-bb02-2750ffd56b4e-kube-api-access-pwmgp\") pod \"mariadb-copy-data\" (UID: \"75ce7228-295e-4058-bb02-2750ffd56b4e\") " pod="openstack/mariadb-copy-data"
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.710544 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c37688de-109d-4628-8aaa-39d0760b3ffd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c37688de-109d-4628-8aaa-39d0760b3ffd\") pod \"mariadb-copy-data\" (UID: \"75ce7228-295e-4058-bb02-2750ffd56b4e\") " pod="openstack/mariadb-copy-data"
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.744832 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Sep 30 15:04:41 crc kubenswrapper[4783]: I0930 15:04:41.843595 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06"
Sep 30 15:04:41 crc kubenswrapper[4783]: E0930 15:04:41.843864 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb"
Sep 30 15:04:42 crc kubenswrapper[4783]: I0930 15:04:42.236913 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"]
Sep 30 15:04:42 crc kubenswrapper[4783]: I0930 15:04:42.619964 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"75ce7228-295e-4058-bb02-2750ffd56b4e","Type":"ContainerStarted","Data":"65552e8af6335c9cd26ec18e49c62da179bef6f8cb15b878ac1861d58066196c"}
Sep 30 15:04:42 crc kubenswrapper[4783]: I0930 15:04:42.620008 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"75ce7228-295e-4058-bb02-2750ffd56b4e","Type":"ContainerStarted","Data":"c69838dc881dd4fb93e40e7d4941194691176ff96755ac25b91d1398fb07ea48"}
Sep 30 15:04:42 crc kubenswrapper[4783]: I0930 15:04:42.642197 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=2.642162965 podStartE2EDuration="2.642162965s" podCreationTimestamp="2025-09-30 15:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:04:42.634549182 +0000 UTC m=+5382.566015499" watchObservedRunningTime="2025-09-30 15:04:42.642162965 +0000 UTC m=+5382.573629282"
Sep 30 15:04:44 crc kubenswrapper[4783]: I0930 15:04:44.340835 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Sep 30 15:04:44 crc kubenswrapper[4783]: I0930 15:04:44.342401 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Sep 30 15:04:44 crc kubenswrapper[4783]: I0930 15:04:44.346413 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Sep 30 15:04:44 crc kubenswrapper[4783]: I0930 15:04:44.514532 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7twk9\" (UniqueName: \"kubernetes.io/projected/a4011460-ce0e-4b34-a241-33f908039f51-kube-api-access-7twk9\") pod \"mariadb-client\" (UID: \"a4011460-ce0e-4b34-a241-33f908039f51\") " pod="openstack/mariadb-client"
Sep 30 15:04:44 crc kubenswrapper[4783]: I0930 15:04:44.616206 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7twk9\" (UniqueName: \"kubernetes.io/projected/a4011460-ce0e-4b34-a241-33f908039f51-kube-api-access-7twk9\") pod \"mariadb-client\" (UID: \"a4011460-ce0e-4b34-a241-33f908039f51\") " pod="openstack/mariadb-client"
Sep 30 15:04:44 crc kubenswrapper[4783]: I0930 15:04:44.635215 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7twk9\" (UniqueName: \"kubernetes.io/projected/a4011460-ce0e-4b34-a241-33f908039f51-kube-api-access-7twk9\") pod \"mariadb-client\" (UID: \"a4011460-ce0e-4b34-a241-33f908039f51\") " pod="openstack/mariadb-client"
Sep 30 15:04:44 crc kubenswrapper[4783]: I0930 15:04:44.669167 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Sep 30 15:04:45 crc kubenswrapper[4783]: I0930 15:04:45.069475 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Sep 30 15:04:45 crc kubenswrapper[4783]: W0930 15:04:45.072007 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda4011460_ce0e_4b34_a241_33f908039f51.slice/crio-c86d8b759bb00df95dd625448269443d8359fd7a3a99e88f3a8b1ee6cda2bbf0 WatchSource:0}: Error finding container c86d8b759bb00df95dd625448269443d8359fd7a3a99e88f3a8b1ee6cda2bbf0: Status 404 returned error can't find the container with id c86d8b759bb00df95dd625448269443d8359fd7a3a99e88f3a8b1ee6cda2bbf0
Sep 30 15:04:45 crc kubenswrapper[4783]: I0930 15:04:45.642930 4783 generic.go:334] "Generic (PLEG): container finished" podID="a4011460-ce0e-4b34-a241-33f908039f51" containerID="8c6e9350ff19d10a57d781a324f52cd8f0cd6df6fa77d3fca9fbbe02df0715c3" exitCode=0
Sep 30 15:04:45 crc kubenswrapper[4783]: I0930 15:04:45.643040 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"a4011460-ce0e-4b34-a241-33f908039f51","Type":"ContainerDied","Data":"8c6e9350ff19d10a57d781a324f52cd8f0cd6df6fa77d3fca9fbbe02df0715c3"}
Sep 30 15:04:45 crc kubenswrapper[4783]: I0930 15:04:45.643177 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"a4011460-ce0e-4b34-a241-33f908039f51","Type":"ContainerStarted","Data":"c86d8b759bb00df95dd625448269443d8359fd7a3a99e88f3a8b1ee6cda2bbf0"}
Sep 30 15:04:46 crc kubenswrapper[4783]: I0930 15:04:46.946444 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Sep 30 15:04:46 crc kubenswrapper[4783]: I0930 15:04:46.970411 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_a4011460-ce0e-4b34-a241-33f908039f51/mariadb-client/0.log"
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.010593 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.025716 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.052469 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7twk9\" (UniqueName: \"kubernetes.io/projected/a4011460-ce0e-4b34-a241-33f908039f51-kube-api-access-7twk9\") pod \"a4011460-ce0e-4b34-a241-33f908039f51\" (UID: \"a4011460-ce0e-4b34-a241-33f908039f51\") "
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.057917 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4011460-ce0e-4b34-a241-33f908039f51-kube-api-access-7twk9" (OuterVolumeSpecName: "kube-api-access-7twk9") pod "a4011460-ce0e-4b34-a241-33f908039f51" (UID: "a4011460-ce0e-4b34-a241-33f908039f51"). InnerVolumeSpecName "kube-api-access-7twk9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.132894 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Sep 30 15:04:47 crc kubenswrapper[4783]: E0930 15:04:47.133336 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4011460-ce0e-4b34-a241-33f908039f51" containerName="mariadb-client"
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.133360 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4011460-ce0e-4b34-a241-33f908039f51" containerName="mariadb-client"
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.133586 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4011460-ce0e-4b34-a241-33f908039f51" containerName="mariadb-client"
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.134089 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.141208 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.154425 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7twk9\" (UniqueName: \"kubernetes.io/projected/a4011460-ce0e-4b34-a241-33f908039f51-kube-api-access-7twk9\") on node \"crc\" DevicePath \"\""
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.256662 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wng5d\" (UniqueName: \"kubernetes.io/projected/7f43e21c-64f2-4dd3-bf24-3dca54c18597-kube-api-access-wng5d\") pod \"mariadb-client\" (UID: \"7f43e21c-64f2-4dd3-bf24-3dca54c18597\") " pod="openstack/mariadb-client"
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.357622 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wng5d\" (UniqueName: \"kubernetes.io/projected/7f43e21c-64f2-4dd3-bf24-3dca54c18597-kube-api-access-wng5d\") pod \"mariadb-client\" (UID: \"7f43e21c-64f2-4dd3-bf24-3dca54c18597\") " pod="openstack/mariadb-client"
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.372967 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wng5d\" (UniqueName: \"kubernetes.io/projected/7f43e21c-64f2-4dd3-bf24-3dca54c18597-kube-api-access-wng5d\") pod \"mariadb-client\" (UID: \"7f43e21c-64f2-4dd3-bf24-3dca54c18597\") " pod="openstack/mariadb-client"
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.454165 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.658600 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c86d8b759bb00df95dd625448269443d8359fd7a3a99e88f3a8b1ee6cda2bbf0"
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.658682 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.674629 4783 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="a4011460-ce0e-4b34-a241-33f908039f51" podUID="7f43e21c-64f2-4dd3-bf24-3dca54c18597"
Sep 30 15:04:47 crc kubenswrapper[4783]: I0930 15:04:47.852106 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Sep 30 15:04:47 crc kubenswrapper[4783]: W0930 15:04:47.862417 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f43e21c_64f2_4dd3_bf24_3dca54c18597.slice/crio-23982c8c540b0f5f4ebcacdaf781b5ec2077544bbc9831f5f8d7619c13a7e422 WatchSource:0}: Error finding container 23982c8c540b0f5f4ebcacdaf781b5ec2077544bbc9831f5f8d7619c13a7e422: Status 404 returned error can't find the container with id 23982c8c540b0f5f4ebcacdaf781b5ec2077544bbc9831f5f8d7619c13a7e422
Sep 30 15:04:48 crc kubenswrapper[4783]: I0930 15:04:48.668634 4783 generic.go:334] "Generic (PLEG): container finished" podID="7f43e21c-64f2-4dd3-bf24-3dca54c18597" containerID="ee508742a7e5e176ce2901b21d61ade7cf17e46e79695de0c5a9b0bceb5f2d9e" exitCode=0
Sep 30 15:04:48 crc kubenswrapper[4783]: I0930 15:04:48.668803 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"7f43e21c-64f2-4dd3-bf24-3dca54c18597","Type":"ContainerDied","Data":"ee508742a7e5e176ce2901b21d61ade7cf17e46e79695de0c5a9b0bceb5f2d9e"}
Sep 30 15:04:48 crc kubenswrapper[4783]: I0930 15:04:48.668931 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"7f43e21c-64f2-4dd3-bf24-3dca54c18597","Type":"ContainerStarted","Data":"23982c8c540b0f5f4ebcacdaf781b5ec2077544bbc9831f5f8d7619c13a7e422"}
Sep 30 15:04:48 crc kubenswrapper[4783]: I0930 15:04:48.853938 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4011460-ce0e-4b34-a241-33f908039f51" path="/var/lib/kubelet/pods/a4011460-ce0e-4b34-a241-33f908039f51/volumes"
Sep 30 15:04:50 crc kubenswrapper[4783]: I0930 15:04:50.042061 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Sep 30 15:04:50 crc kubenswrapper[4783]: I0930 15:04:50.062011 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_7f43e21c-64f2-4dd3-bf24-3dca54c18597/mariadb-client/0.log"
Sep 30 15:04:50 crc kubenswrapper[4783]: I0930 15:04:50.090949 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Sep 30 15:04:50 crc kubenswrapper[4783]: I0930 15:04:50.101008 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Sep 30 15:04:50 crc kubenswrapper[4783]: I0930 15:04:50.203845 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wng5d\" (UniqueName: \"kubernetes.io/projected/7f43e21c-64f2-4dd3-bf24-3dca54c18597-kube-api-access-wng5d\") pod \"7f43e21c-64f2-4dd3-bf24-3dca54c18597\" (UID: \"7f43e21c-64f2-4dd3-bf24-3dca54c18597\") "
Sep 30 15:04:50 crc kubenswrapper[4783]: I0930 15:04:50.209403 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f43e21c-64f2-4dd3-bf24-3dca54c18597-kube-api-access-wng5d" (OuterVolumeSpecName: "kube-api-access-wng5d") pod "7f43e21c-64f2-4dd3-bf24-3dca54c18597" (UID: "7f43e21c-64f2-4dd3-bf24-3dca54c18597"). InnerVolumeSpecName "kube-api-access-wng5d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 15:04:50 crc kubenswrapper[4783]: I0930 15:04:50.306254 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wng5d\" (UniqueName: \"kubernetes.io/projected/7f43e21c-64f2-4dd3-bf24-3dca54c18597-kube-api-access-wng5d\") on node \"crc\" DevicePath \"\""
Sep 30 15:04:50 crc kubenswrapper[4783]: I0930 15:04:50.687881 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23982c8c540b0f5f4ebcacdaf781b5ec2077544bbc9831f5f8d7619c13a7e422"
Sep 30 15:04:50 crc kubenswrapper[4783]: I0930 15:04:50.687913 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Sep 30 15:04:50 crc kubenswrapper[4783]: I0930 15:04:50.856179 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f43e21c-64f2-4dd3-bf24-3dca54c18597" path="/var/lib/kubelet/pods/7f43e21c-64f2-4dd3-bf24-3dca54c18597/volumes"
Sep 30 15:04:52 crc kubenswrapper[4783]: I0930 15:04:52.844176 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06"
Sep 30 15:04:52 crc kubenswrapper[4783]: E0930 15:04:52.844891 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb"
Sep 30 15:05:04 crc kubenswrapper[4783]: I0930 15:05:04.843417 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06"
Sep 30 15:05:04 crc kubenswrapper[4783]: E0930 15:05:04.844159 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb"
Sep 30 15:05:19 crc kubenswrapper[4783]: I0930 15:05:19.842776 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06"
Sep 30 15:05:19 crc kubenswrapper[4783]: E0930 15:05:19.843458 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.005827 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Sep 30 15:05:20 crc kubenswrapper[4783]: E0930 15:05:20.006202 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f43e21c-64f2-4dd3-bf24-3dca54c18597" containerName="mariadb-client"
podUID="7f43e21c-64f2-4dd3-bf24-3dca54c18597" containerName="mariadb-client" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.006475 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f43e21c-64f2-4dd3-bf24-3dca54c18597" containerName="mariadb-client" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.007500 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.011065 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.013119 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.013265 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-8tmzl" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.013393 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.013524 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.022794 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.024437 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.034461 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.036067 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.049167 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.054993 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.061633 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.178619 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.178669 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bba4023a-311b-4de2-b9fd-6ea88b42f47f-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.178707 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.178724 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.178748 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bba4023a-311b-4de2-b9fd-6ea88b42f47f-config\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.178771 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.178788 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-da80fc82-ebe2-4fb6-b769-24eee390848c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da80fc82-ebe2-4fb6-b769-24eee390848c\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.178810 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-scripts\") pod \"ovsdbserver-nb-2\" (UID: 
\"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.178828 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bba4023a-311b-4de2-b9fd-6ea88b42f47f-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.178854 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-config\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.178881 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.178903 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bba4023a-311b-4de2-b9fd-6ea88b42f47f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.178999 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-config\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.179091 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhs4n\" (UniqueName: \"kubernetes.io/projected/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-kube-api-access-zhs4n\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.179121 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.179144 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.179158 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdggr\" (UniqueName: \"kubernetes.io/projected/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-kube-api-access-xdggr\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 
15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.179184 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bba4023a-311b-4de2-b9fd-6ea88b42f47f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.179254 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bba4023a-311b-4de2-b9fd-6ea88b42f47f-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.179307 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6a747a92-237c-4e2b-ae0d-1859b90e7db0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6a747a92-237c-4e2b-ae0d-1859b90e7db0\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.179344 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.179369 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f977b490-6209-42d5-82c8-2a8978d5e6dd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f977b490-6209-42d5-82c8-2a8978d5e6dd\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.179461 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp5b7\" (UniqueName: \"kubernetes.io/projected/bba4023a-311b-4de2-b9fd-6ea88b42f47f-kube-api-access-vp5b7\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.179495 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281037 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281098 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " 
pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281299 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bba4023a-311b-4de2-b9fd-6ea88b42f47f-config\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281384 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281417 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-da80fc82-ebe2-4fb6-b769-24eee390848c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da80fc82-ebe2-4fb6-b769-24eee390848c\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281460 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281484 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bba4023a-311b-4de2-b9fd-6ea88b42f47f-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281512 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-config\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281543 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281567 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bba4023a-311b-4de2-b9fd-6ea88b42f47f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281615 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-config\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281679 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhs4n\" (UniqueName: 
\"kubernetes.io/projected/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-kube-api-access-zhs4n\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281711 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281737 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281764 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdggr\" (UniqueName: \"kubernetes.io/projected/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-kube-api-access-xdggr\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281798 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bba4023a-311b-4de2-b9fd-6ea88b42f47f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281840 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bba4023a-311b-4de2-b9fd-6ea88b42f47f-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281865 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6a747a92-237c-4e2b-ae0d-1859b90e7db0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6a747a92-237c-4e2b-ae0d-1859b90e7db0\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281888 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281917 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f977b490-6209-42d5-82c8-2a8978d5e6dd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f977b490-6209-42d5-82c8-2a8978d5e6dd\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281961 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp5b7\" (UniqueName: \"kubernetes.io/projected/bba4023a-311b-4de2-b9fd-6ea88b42f47f-kube-api-access-vp5b7\") pod \"ovsdbserver-nb-1\" (UID: 
\"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.281988 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.282087 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.282115 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bba4023a-311b-4de2-b9fd-6ea88b42f47f-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.282612 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-config\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.282637 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.282956 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bba4023a-311b-4de2-b9fd-6ea88b42f47f-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.282995 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.283141 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.283203 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bba4023a-311b-4de2-b9fd-6ea88b42f47f-config\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.283373 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-config\") pod \"ovsdbserver-nb-2\" (UID: 
\"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.283585 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.283809 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bba4023a-311b-4de2-b9fd-6ea88b42f47f-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.286452 4783 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.286488 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-da80fc82-ebe2-4fb6-b769-24eee390848c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da80fc82-ebe2-4fb6-b769-24eee390848c\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a7e970ecd2368b8c6db7769fde3c494101d8cfa16f70e3b3632c62c1f7e604f1/globalmount\"" pod="openstack/ovsdbserver-nb-0" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.287157 4783 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.287205 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6a747a92-237c-4e2b-ae0d-1859b90e7db0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6a747a92-237c-4e2b-ae0d-1859b90e7db0\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/83e63295d2ceadab0d467656b43c16e7bce786bb92840d5c92c3fc2c4898a7e3/globalmount\"" pod="openstack/ovsdbserver-nb-2" Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.288732 4783 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.288764 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f977b490-6209-42d5-82c8-2a8978d5e6dd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f977b490-6209-42d5-82c8-2a8978d5e6dd\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/4e41ccd4432e4850ac6351a24a6b5451a3066c72319855c1319ff95047e0946a/globalmount\"" pod="openstack/ovsdbserver-nb-1"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.289065 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bba4023a-311b-4de2-b9fd-6ea88b42f47f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.289784 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bba4023a-311b-4de2-b9fd-6ea88b42f47f-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.290783 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.293188 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bba4023a-311b-4de2-b9fd-6ea88b42f47f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.294871 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.296091 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.299130 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.301698 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.303795 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vp5b7\" (UniqueName: \"kubernetes.io/projected/bba4023a-311b-4de2-b9fd-6ea88b42f47f-kube-api-access-vp5b7\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.304879 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.306281 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhs4n\" (UniqueName: \"kubernetes.io/projected/2476cec9-1ee6-438b-9ccf-7a3fa57d474a-kube-api-access-zhs4n\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.308507 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdggr\" (UniqueName: \"kubernetes.io/projected/b67e5930-bf3a-4fb8-90f9-7a3e3446050d-kube-api-access-xdggr\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.332175 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-da80fc82-ebe2-4fb6-b769-24eee390848c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da80fc82-ebe2-4fb6-b769-24eee390848c\") pod \"ovsdbserver-nb-0\" (UID: \"b67e5930-bf3a-4fb8-90f9-7a3e3446050d\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.335569 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f977b490-6209-42d5-82c8-2a8978d5e6dd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f977b490-6209-42d5-82c8-2a8978d5e6dd\") pod \"ovsdbserver-nb-1\" (UID: \"bba4023a-311b-4de2-b9fd-6ea88b42f47f\") " pod="openstack/ovsdbserver-nb-1"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.339842 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6a747a92-237c-4e2b-ae0d-1859b90e7db0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6a747a92-237c-4e2b-ae0d-1859b90e7db0\") pod \"ovsdbserver-nb-2\" (UID: \"2476cec9-1ee6-438b-9ccf-7a3fa57d474a\") " pod="openstack/ovsdbserver-nb-2"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.350740 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.368061 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.630199 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.911091 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"]
Sep 30 15:05:20 crc kubenswrapper[4783]: W0930 15:05:20.916580 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2476cec9_1ee6_438b_9ccf_7a3fa57d474a.slice/crio-2097779fbd65248f9d82ac10a5d762ed8b33a32ab9f289df5fff3e9e96dbd52c WatchSource:0}: Error finding container 2097779fbd65248f9d82ac10a5d762ed8b33a32ab9f289df5fff3e9e96dbd52c: Status 404 returned error can't find the container with id 2097779fbd65248f9d82ac10a5d762ed8b33a32ab9f289df5fff3e9e96dbd52c
Sep 30 15:05:20 crc kubenswrapper[4783]: I0930 15:05:20.929384 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"2476cec9-1ee6-438b-9ccf-7a3fa57d474a","Type":"ContainerStarted","Data":"2097779fbd65248f9d82ac10a5d762ed8b33a32ab9f289df5fff3e9e96dbd52c"}
Sep 30 15:05:21 crc kubenswrapper[4783]: I0930 15:05:21.007637 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"]
Sep 30 15:05:21 crc kubenswrapper[4783]: W0930 15:05:21.018455 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbba4023a_311b_4de2_b9fd_6ea88b42f47f.slice/crio-c45ec11eb8dcb74121d052ff72007bb61c0243b1b0c991e2f06caecc67f0351e WatchSource:0}: Error finding container c45ec11eb8dcb74121d052ff72007bb61c0243b1b0c991e2f06caecc67f0351e: Status 404 returned error can't find the container with id c45ec11eb8dcb74121d052ff72007bb61c0243b1b0c991e2f06caecc67f0351e
Sep 30 15:05:21 crc kubenswrapper[4783]: I0930 15:05:21.151894 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Sep 30 15:05:21 crc kubenswrapper[4783]: W0930 15:05:21.156303 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb67e5930_bf3a_4fb8_90f9_7a3e3446050d.slice/crio-2743051d8ad5652dd08fa283f22098ef1376c48d0fe2a2b5ffea4fcb11df3224 WatchSource:0}: Error finding container 2743051d8ad5652dd08fa283f22098ef1376c48d0fe2a2b5ffea4fcb11df3224: Status 404 returned error can't find the container with id 2743051d8ad5652dd08fa283f22098ef1376c48d0fe2a2b5ffea4fcb11df3224
Sep 30 15:05:21 crc kubenswrapper[4783]: I0930 15:05:21.939513 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"bba4023a-311b-4de2-b9fd-6ea88b42f47f","Type":"ContainerStarted","Data":"823af2f84f6bacd748a474ecdb26d0b5091101f0174ae4d86a7cb84f0d433324"}
Sep 30 15:05:21 crc kubenswrapper[4783]: I0930 15:05:21.940593 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"bba4023a-311b-4de2-b9fd-6ea88b42f47f","Type":"ContainerStarted","Data":"9f581682eacd784f55b31a7d8f00033bae087a37662e5e630fa0b8a31423e56e"}
Sep 30 15:05:21 crc kubenswrapper[4783]: I0930 15:05:21.940628 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"bba4023a-311b-4de2-b9fd-6ea88b42f47f","Type":"ContainerStarted","Data":"c45ec11eb8dcb74121d052ff72007bb61c0243b1b0c991e2f06caecc67f0351e"}
Sep 30 15:05:21 crc kubenswrapper[4783]: I0930 15:05:21.943729 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"2476cec9-1ee6-438b-9ccf-7a3fa57d474a","Type":"ContainerStarted","Data":"c8850b69b7a28f05799461fd126bae12a0fdd2feb292c205ccbaf2451dab6e05"}
Sep 30 15:05:21 crc kubenswrapper[4783]: I0930 15:05:21.943772 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"2476cec9-1ee6-438b-9ccf-7a3fa57d474a","Type":"ContainerStarted","Data":"99b83b551cff969e14480ae38837a89387814594e3237dc159147e0a93abebd9"}
Sep 30 15:05:21 crc kubenswrapper[4783]: I0930 15:05:21.947790 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b67e5930-bf3a-4fb8-90f9-7a3e3446050d","Type":"ContainerStarted","Data":"8e86b9135d6e44a40c080082497414f64d88ca7176b308a42e407017dd584e50"}
Sep 30 15:05:21 crc kubenswrapper[4783]: I0930 15:05:21.947853 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b67e5930-bf3a-4fb8-90f9-7a3e3446050d","Type":"ContainerStarted","Data":"834b96020d954165ec44b4ee4d66d171c8aa87f735f2c40ac22860669d4e44d1"}
Sep 30 15:05:21 crc kubenswrapper[4783]: I0930 15:05:21.947865 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b67e5930-bf3a-4fb8-90f9-7a3e3446050d","Type":"ContainerStarted","Data":"2743051d8ad5652dd08fa283f22098ef1376c48d0fe2a2b5ffea4fcb11df3224"}
Sep 30 15:05:21 crc kubenswrapper[4783]: I0930 15:05:21.967371 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=3.967329537 podStartE2EDuration="3.967329537s" podCreationTimestamp="2025-09-30 15:05:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:05:21.960189869 +0000 UTC m=+5421.891656196" watchObservedRunningTime="2025-09-30 15:05:21.967329537 +0000 UTC m=+5421.898795844"
Sep 30 15:05:21 crc kubenswrapper[4783]: I0930 15:05:21.985789 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=3.985769378 podStartE2EDuration="3.985769378s" podCreationTimestamp="2025-09-30 15:05:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:05:21.982014187 +0000 UTC m=+5421.913480504" watchObservedRunningTime="2025-09-30 15:05:21.985769378 +0000 UTC m=+5421.917235685"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.003469 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=4.003446904 podStartE2EDuration="4.003446904s" podCreationTimestamp="2025-09-30 15:05:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:05:22.001333656 +0000 UTC m=+5421.932799963" watchObservedRunningTime="2025-09-30 15:05:22.003446904 +0000 UTC m=+5421.934913211"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.389155 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.390691 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.393174 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.393420 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.393464 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.397638 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-nktr2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.405734 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.418476 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"]
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.420133 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.425322 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"]
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.426598 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.441612 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"]
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.449920 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"]
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.522792 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.522849 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a586697-ba7d-4413-8340-e9cbd3ea7424-config\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.522867 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.522891 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k24mn\" (UniqueName: \"kubernetes.io/projected/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-kube-api-access-k24mn\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.522982 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a586697-ba7d-4413-8340-e9cbd3ea7424-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523096 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523164 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a586697-ba7d-4413-8340-e9cbd3ea7424-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523204 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-202f39c4-8ad7-46dc-bf4a-54d5a5038c63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-202f39c4-8ad7-46dc-bf4a-54d5a5038c63\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523281 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a586697-ba7d-4413-8340-e9cbd3ea7424-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523307 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523361 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-config\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523435 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a586697-ba7d-4413-8340-e9cbd3ea7424-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523465 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfqxq\" (UniqueName: \"kubernetes.io/projected/1a586697-ba7d-4413-8340-e9cbd3ea7424-kube-api-access-kfqxq\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523558 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523587 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523626 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-65b8bcbb-21f9-4122-be44-9570db96ddb0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-65b8bcbb-21f9-4122-be44-9570db96ddb0\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523675 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523717 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523744 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6eab80ad-228e-476b-8396-5d1b72d5abc0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6eab80ad-228e-476b-8396-5d1b72d5abc0\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523767 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-config\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523803 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs922\" (UniqueName: \"kubernetes.io/projected/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-kube-api-access-vs922\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523827 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1a586697-ba7d-4413-8340-e9cbd3ea7424-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523854 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.523877 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625438 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625515 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a586697-ba7d-4413-8340-e9cbd3ea7424-config\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625546 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625575 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k24mn\" (UniqueName: \"kubernetes.io/projected/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-kube-api-access-k24mn\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625600 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a586697-ba7d-4413-8340-e9cbd3ea7424-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625624 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625647 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a586697-ba7d-4413-8340-e9cbd3ea7424-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625665 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-202f39c4-8ad7-46dc-bf4a-54d5a5038c63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-202f39c4-8ad7-46dc-bf4a-54d5a5038c63\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625689 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a586697-ba7d-4413-8340-e9cbd3ea7424-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625704 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625726 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-config\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625751 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a586697-ba7d-4413-8340-e9cbd3ea7424-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625766 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfqxq\" (UniqueName: \"kubernetes.io/projected/1a586697-ba7d-4413-8340-e9cbd3ea7424-kube-api-access-kfqxq\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625783 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625799 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625817 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-65b8bcbb-21f9-4122-be44-9570db96ddb0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-65b8bcbb-21f9-4122-be44-9570db96ddb0\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625840 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625866 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625883 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6eab80ad-228e-476b-8396-5d1b72d5abc0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6eab80ad-228e-476b-8396-5d1b72d5abc0\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625899 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-config\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625922 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs922\" (UniqueName: \"kubernetes.io/projected/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-kube-api-access-vs922\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625938 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1a586697-ba7d-4413-8340-e9cbd3ea7424-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625955 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.625969 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.627093 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-config\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.627191 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a586697-ba7d-4413-8340-e9cbd3ea7424-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.627502 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.627689 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.627730 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.627837 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a586697-ba7d-4413-8340-e9cbd3ea7424-config\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.628966 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.629055 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1a586697-ba7d-4413-8340-e9cbd3ea7424-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.629396 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-config\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.631790 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.631992 4783 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.632034 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-202f39c4-8ad7-46dc-bf4a-54d5a5038c63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-202f39c4-8ad7-46dc-bf4a-54d5a5038c63\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/965528f10a86e25c681a4c5ced68f8eb9690ccbdffdd6a5c61e18282d0b9a330/globalmount\"" pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.632386 4783 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
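The three ovsdbserver-sb pods above walk through the same volume lifecycle the nb pods just completed, with each stage logged from a distinct call site: VerifyControllerAttachedVolume (reconciler_common.go:245), MountVolume started (reconciler_common.go:218), MountVolume.MountDevice for the CSI global mount (operation_generator.go:580), and MountVolume.SetUp for the per-pod mount (operation_generator.go:637). A throwaway sketch, not part of this job's tooling, for pairing "MountVolume started" entries with their "SetUp succeeded" entries in a kubelet.log fed on stdin (the regexes target the escaped-quote format of these exact lines):

    package main

    import (
    	"bufio"
    	"fmt"
    	"os"
    	"regexp"
    )

    var (
    	// volume names are wrapped in literal \" escapes in kubelet.log
    	started   = regexp.MustCompile(`"operationExecutor\.MountVolume started for volume \\"([^\\"]+)\\".*pod="([^"]+)"`)
    	succeeded = regexp.MustCompile(`"MountVolume\.SetUp succeeded for volume \\"([^\\"]+)\\".*pod="([^"]+)"`)
    )

    func main() {
    	sc := bufio.NewScanner(os.Stdin)
    	sc.Buffer(make([]byte, 1024*1024), 1024*1024) // kubelet lines can be long
    	pending := map[string]bool{}
    	for sc.Scan() {
    		line := sc.Text()
    		if m := started.FindStringSubmatch(line); m != nil {
    			pending[m[2]+" volume="+m[1]] = true
    		}
    		if m := succeeded.FindStringSubmatch(line); m != nil {
    			delete(pending, m[2]+" volume="+m[1])
    		}
    	}
    	for k := range pending {
    		fmt.Println("mount started but never completed:", k)
    	}
    }

Feeding this section through it should print nothing: every volume that starts mounting here finishes.
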
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.632489 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6eab80ad-228e-476b-8396-5d1b72d5abc0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6eab80ad-228e-476b-8396-5d1b72d5abc0\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e2df74fff017b4ba1b87d1fd3d25c0ac70aae558f1f6a783848c43f46b08a7e6/globalmount\"" pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.632802 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.633011 4783 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.633045 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-65b8bcbb-21f9-4122-be44-9570db96ddb0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-65b8bcbb-21f9-4122-be44-9570db96ddb0\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/ad509cc888113ec77b7255d9a6a7520dfbbb639dc412741fae16d460e322894a/globalmount\"" pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.633243 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a586697-ba7d-4413-8340-e9cbd3ea7424-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.633937 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.635705 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.636294 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.636809 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.637987 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a586697-ba7d-4413-8340-e9cbd3ea7424-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.642536 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a586697-ba7d-4413-8340-e9cbd3ea7424-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.644986 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k24mn\" (UniqueName: \"kubernetes.io/projected/315aa6a3-f75b-41b5-9aef-f1d4b659ebab-kube-api-access-k24mn\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.646104 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vs922\" (UniqueName: \"kubernetes.io/projected/a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748-kube-api-access-vs922\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.646119 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfqxq\" (UniqueName: \"kubernetes.io/projected/1a586697-ba7d-4413-8340-e9cbd3ea7424-kube-api-access-kfqxq\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.661016 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6eab80ad-228e-476b-8396-5d1b72d5abc0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6eab80ad-228e-476b-8396-5d1b72d5abc0\") pod \"ovsdbserver-sb-2\" (UID: \"1a586697-ba7d-4413-8340-e9cbd3ea7424\") " pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.661500 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-202f39c4-8ad7-46dc-bf4a-54d5a5038c63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-202f39c4-8ad7-46dc-bf4a-54d5a5038c63\") pod \"ovsdbserver-sb-0\" (UID: \"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.668418 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-65b8bcbb-21f9-4122-be44-9570db96ddb0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-65b8bcbb-21f9-4122-be44-9570db96ddb0\") pod \"ovsdbserver-sb-1\" (UID: \"315aa6a3-f75b-41b5-9aef-f1d4b659ebab\") " pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.724842 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.747166 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:22 crc kubenswrapper[4783]: I0930 15:05:22.758606 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:23 crc kubenswrapper[4783]: I0930 15:05:23.232777 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Sep 30 15:05:23 crc kubenswrapper[4783]: W0930 15:05:23.241526 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0a8f3d9_de5d_4eb4_a8b8_4a85fea71748.slice/crio-e544998fe31ef8b2a0ab8cb32d3c638daa1ae5d8bcba2ad8d0d7ae0882a85f7e WatchSource:0}: Error finding container e544998fe31ef8b2a0ab8cb32d3c638daa1ae5d8bcba2ad8d0d7ae0882a85f7e: Status 404 returned error can't find the container with id e544998fe31ef8b2a0ab8cb32d3c638daa1ae5d8bcba2ad8d0d7ae0882a85f7e
Sep 30 15:05:23 crc kubenswrapper[4783]: I0930 15:05:23.339683 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"]
Sep 30 15:05:23 crc kubenswrapper[4783]: W0930 15:05:23.345565 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod315aa6a3_f75b_41b5_9aef_f1d4b659ebab.slice/crio-29ec78dd946a93051766ada8fa3e883d55e54b5237cbe07960a8aa2ffa7b9333 WatchSource:0}: Error finding container 29ec78dd946a93051766ada8fa3e883d55e54b5237cbe07960a8aa2ffa7b9333: Status 404 returned error can't find the container with id 29ec78dd946a93051766ada8fa3e883d55e54b5237cbe07960a8aa2ffa7b9333
Sep 30 15:05:23 crc kubenswrapper[4783]: I0930 15:05:23.351946 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2"
Sep 30 15:05:23 crc kubenswrapper[4783]: I0930 15:05:23.368237 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1"
Sep 30 15:05:23 crc kubenswrapper[4783]: I0930 15:05:23.631348 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Sep 30 15:05:23 crc kubenswrapper[4783]: I0930 15:05:23.971017 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748","Type":"ContainerStarted","Data":"cd273d18f2fcb5342cd0e55367f624c9bdee2746abc7acc8719ab1f6d0ac8428"}
Sep 30 15:05:23 crc kubenswrapper[4783]: I0930 15:05:23.971079 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748","Type":"ContainerStarted","Data":"fcdacac374b97a8b212a83f59a091dfc12e48ef8c1b5f7b10534ad1997e29ece"}
Sep 30 15:05:23 crc kubenswrapper[4783]: I0930 15:05:23.971094 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748","Type":"ContainerStarted","Data":"e544998fe31ef8b2a0ab8cb32d3c638daa1ae5d8bcba2ad8d0d7ae0882a85f7e"}
Sep 30 15:05:23 crc kubenswrapper[4783]: I0930 15:05:23.974187 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"315aa6a3-f75b-41b5-9aef-f1d4b659ebab","Type":"ContainerStarted","Data":"f2601a439cd311418d788cc1697c1a848b0d45c6715e8ea37f3729f40bee27a2"}
Sep 30 15:05:23 crc kubenswrapper[4783]: I0930 15:05:23.974281 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"315aa6a3-f75b-41b5-9aef-f1d4b659ebab","Type":"ContainerStarted","Data":"2d0d80b10bbb92c30e2b0a91b2067e0f8f3242db63a64f4bc760e19682aed8c1"}
Sep 30 15:05:23 crc kubenswrapper[4783]: I0930 15:05:23.974299 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"315aa6a3-f75b-41b5-9aef-f1d4b659ebab","Type":"ContainerStarted","Data":"29ec78dd946a93051766ada8fa3e883d55e54b5237cbe07960a8aa2ffa7b9333"}
Sep 30 15:05:23 crc kubenswrapper[4783]: I0930 15:05:23.993251 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=2.99320494 podStartE2EDuration="2.99320494s" podCreationTimestamp="2025-09-30 15:05:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:05:23.990146752 +0000 UTC m=+5423.921613049" watchObservedRunningTime="2025-09-30 15:05:23.99320494 +0000 UTC m=+5423.924671247"
Sep 30 15:05:24 crc kubenswrapper[4783]: I0930 15:05:24.014394 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=3.014371677 podStartE2EDuration="3.014371677s" podCreationTimestamp="2025-09-30 15:05:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:05:24.010664999 +0000 UTC m=+5423.942131306" watchObservedRunningTime="2025-09-30 15:05:24.014371677 +0000 UTC m=+5423.945837984"
Sep 30 15:05:24 crc kubenswrapper[4783]: I0930 15:05:24.388199 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"]
Sep 30 15:05:24 crc kubenswrapper[4783]: W0930 15:05:24.391823 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a586697_ba7d_4413_8340_e9cbd3ea7424.slice/crio-9b813283764d4ba98ba45f380eda4e3ee53324abac670dd853277a41d5590c78 WatchSource:0}: Error finding container 9b813283764d4ba98ba45f380eda4e3ee53324abac670dd853277a41d5590c78: Status 404 returned error can't find the container with id 9b813283764d4ba98ba45f380eda4e3ee53324abac670dd853277a41d5590c78
Sep 30 15:05:24 crc kubenswrapper[4783]: I0930 15:05:24.983512 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"1a586697-ba7d-4413-8340-e9cbd3ea7424","Type":"ContainerStarted","Data":"8962180e151c805d385236a3b5f15934755e08ab75ab76fb60ee2f8c1245c835"}
Sep 30 15:05:24 crc kubenswrapper[4783]: I0930 15:05:24.984685 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"1a586697-ba7d-4413-8340-e9cbd3ea7424","Type":"ContainerStarted","Data":"6b797e55b4ad35fb70ca20c751dd04b264bc72a157dbdfc9db3466d996967e96"}
Sep 30 15:05:24 crc kubenswrapper[4783]: I0930 15:05:24.984700 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"1a586697-ba7d-4413-8340-e9cbd3ea7424","Type":"ContainerStarted","Data":"9b813283764d4ba98ba45f380eda4e3ee53324abac670dd853277a41d5590c78"}
Sep 30 15:05:25 crc kubenswrapper[4783]: I0930 15:05:25.000179 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=4.000156478 podStartE2EDuration="4.000156478s" podCreationTimestamp="2025-09-30 15:05:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:05:24.999356032 +0000 UTC m=+5424.930822359" watchObservedRunningTime="2025-09-30 15:05:25.000156478 +0000 UTC m=+5424.931622805"
Sep 30 15:05:25 crc kubenswrapper[4783]: I0930 15:05:25.351840 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2"
Sep 30 15:05:25 crc kubenswrapper[4783]: I0930 15:05:25.368927 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1"
Sep 30 15:05:25 crc kubenswrapper[4783]: I0930 15:05:25.631901 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Sep 30 15:05:25 crc kubenswrapper[4783]: I0930 15:05:25.725553 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Sep 30 15:05:25 crc kubenswrapper[4783]: I0930 15:05:25.747691 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2"
Sep 30 15:05:25 crc kubenswrapper[4783]: I0930 15:05:25.759388 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.389627 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.428999 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.440439 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.490011 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.617204 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-97cb7bf45-jhqhj"]
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.618677 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.624850 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.633025 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-97cb7bf45-jhqhj"]
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.677510 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.712121 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-config\") pod \"dnsmasq-dns-97cb7bf45-jhqhj\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.712175 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsfkg\" (UniqueName: \"kubernetes.io/projected/76832cc7-62d9-4eee-b61e-d67891fa13de-kube-api-access-tsfkg\") pod \"dnsmasq-dns-97cb7bf45-jhqhj\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.712258 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-ovsdbserver-nb\") pod \"dnsmasq-dns-97cb7bf45-jhqhj\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.712319 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-dns-svc\") pod \"dnsmasq-dns-97cb7bf45-jhqhj\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.724490 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.814129 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-config\") pod \"dnsmasq-dns-97cb7bf45-jhqhj\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.814199 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsfkg\" (UniqueName: \"kubernetes.io/projected/76832cc7-62d9-4eee-b61e-d67891fa13de-kube-api-access-tsfkg\") pod \"dnsmasq-dns-97cb7bf45-jhqhj\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.814245 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-ovsdbserver-nb\") pod \"dnsmasq-dns-97cb7bf45-jhqhj\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj"
Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 
15:05:26.814321 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-dns-svc\") pod \"dnsmasq-dns-97cb7bf45-jhqhj\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.815135 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-config\") pod \"dnsmasq-dns-97cb7bf45-jhqhj\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.815164 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-ovsdbserver-nb\") pod \"dnsmasq-dns-97cb7bf45-jhqhj\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.815208 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-dns-svc\") pod \"dnsmasq-dns-97cb7bf45-jhqhj\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.834029 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsfkg\" (UniqueName: \"kubernetes.io/projected/76832cc7-62d9-4eee-b61e-d67891fa13de-kube-api-access-tsfkg\") pod \"dnsmasq-dns-97cb7bf45-jhqhj\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" Sep 30 15:05:26 crc kubenswrapper[4783]: I0930 15:05:26.953801 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" Sep 30 15:05:27 crc kubenswrapper[4783]: I0930 15:05:27.393553 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-97cb7bf45-jhqhj"] Sep 30 15:05:27 crc kubenswrapper[4783]: W0930 15:05:27.399116 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76832cc7_62d9_4eee_b61e_d67891fa13de.slice/crio-797fda22bdc8bd3e56f33c7ef8c7444536c6a5fa4d15939230e6fdf6d1cf5afb WatchSource:0}: Error finding container 797fda22bdc8bd3e56f33c7ef8c7444536c6a5fa4d15939230e6fdf6d1cf5afb: Status 404 returned error can't find the container with id 797fda22bdc8bd3e56f33c7ef8c7444536c6a5fa4d15939230e6fdf6d1cf5afb Sep 30 15:05:27 crc kubenswrapper[4783]: I0930 15:05:27.725977 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 30 15:05:27 crc kubenswrapper[4783]: I0930 15:05:27.747993 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Sep 30 15:05:27 crc kubenswrapper[4783]: I0930 15:05:27.758815 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Sep 30 15:05:28 crc kubenswrapper[4783]: I0930 15:05:28.009979 4783 generic.go:334] "Generic (PLEG): container finished" podID="76832cc7-62d9-4eee-b61e-d67891fa13de" containerID="b1544fb82a55ec0f4a1b86979c7f5317af827164dcb120c0a20f138da20686c9" exitCode=0 Sep 30 15:05:28 crc kubenswrapper[4783]: I0930 15:05:28.010028 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" event={"ID":"76832cc7-62d9-4eee-b61e-d67891fa13de","Type":"ContainerDied","Data":"b1544fb82a55ec0f4a1b86979c7f5317af827164dcb120c0a20f138da20686c9"} Sep 30 15:05:28 crc kubenswrapper[4783]: I0930 15:05:28.010625 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" event={"ID":"76832cc7-62d9-4eee-b61e-d67891fa13de","Type":"ContainerStarted","Data":"797fda22bdc8bd3e56f33c7ef8c7444536c6a5fa4d15939230e6fdf6d1cf5afb"} Sep 30 15:05:28 crc kubenswrapper[4783]: I0930 15:05:28.764937 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 30 15:05:28 crc kubenswrapper[4783]: I0930 15:05:28.792747 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Sep 30 15:05:28 crc kubenswrapper[4783]: I0930 15:05:28.801440 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Sep 30 15:05:28 crc kubenswrapper[4783]: I0930 15:05:28.809425 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 30 15:05:28 crc kubenswrapper[4783]: I0930 15:05:28.866051 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Sep 30 15:05:28 crc kubenswrapper[4783]: I0930 15:05:28.994885 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-97cb7bf45-jhqhj"] Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.018534 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f959578df-vbwjf"] Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.020769 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.023361 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.023561 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" event={"ID":"76832cc7-62d9-4eee-b61e-d67891fa13de","Type":"ContainerStarted","Data":"034f02aab24980d543ecc742e7322bd067f0f893e2b6b13940b1c8be38275092"} Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.024192 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.037891 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f959578df-vbwjf"] Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.084614 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" podStartSLOduration=3.084587907 podStartE2EDuration="3.084587907s" podCreationTimestamp="2025-09-30 15:05:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:05:29.075956471 +0000 UTC m=+5429.007422778" watchObservedRunningTime="2025-09-30 15:05:29.084587907 +0000 UTC m=+5429.016054214" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.091562 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.160750 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-config\") pod \"dnsmasq-dns-5f959578df-vbwjf\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.160964 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-dns-svc\") pod \"dnsmasq-dns-5f959578df-vbwjf\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.161061 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsdfh\" (UniqueName: \"kubernetes.io/projected/06c125eb-bc02-42b4-a1d5-523be036ab98-kube-api-access-vsdfh\") pod \"dnsmasq-dns-5f959578df-vbwjf\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.161095 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-ovsdbserver-nb\") pod \"dnsmasq-dns-5f959578df-vbwjf\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.162926 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-ovsdbserver-sb\") pod \"dnsmasq-dns-5f959578df-vbwjf\" 
(UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.264434 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-ovsdbserver-sb\") pod \"dnsmasq-dns-5f959578df-vbwjf\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.264535 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-config\") pod \"dnsmasq-dns-5f959578df-vbwjf\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.264625 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-dns-svc\") pod \"dnsmasq-dns-5f959578df-vbwjf\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.264703 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsdfh\" (UniqueName: \"kubernetes.io/projected/06c125eb-bc02-42b4-a1d5-523be036ab98-kube-api-access-vsdfh\") pod \"dnsmasq-dns-5f959578df-vbwjf\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.264724 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-ovsdbserver-nb\") pod \"dnsmasq-dns-5f959578df-vbwjf\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.265634 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-config\") pod \"dnsmasq-dns-5f959578df-vbwjf\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.265741 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-ovsdbserver-nb\") pod \"dnsmasq-dns-5f959578df-vbwjf\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.265830 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-dns-svc\") pod \"dnsmasq-dns-5f959578df-vbwjf\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.265889 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-ovsdbserver-sb\") pod \"dnsmasq-dns-5f959578df-vbwjf\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc 
kubenswrapper[4783]: I0930 15:05:29.284412 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsdfh\" (UniqueName: \"kubernetes.io/projected/06c125eb-bc02-42b4-a1d5-523be036ab98-kube-api-access-vsdfh\") pod \"dnsmasq-dns-5f959578df-vbwjf\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.342353 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:29 crc kubenswrapper[4783]: I0930 15:05:29.682398 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f959578df-vbwjf"] Sep 30 15:05:29 crc kubenswrapper[4783]: W0930 15:05:29.696838 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06c125eb_bc02_42b4_a1d5_523be036ab98.slice/crio-f28d840f1e60f72f3aa434044c4f188abd62b402550c29c657939e17e50556cf WatchSource:0}: Error finding container f28d840f1e60f72f3aa434044c4f188abd62b402550c29c657939e17e50556cf: Status 404 returned error can't find the container with id f28d840f1e60f72f3aa434044c4f188abd62b402550c29c657939e17e50556cf Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.033181 4783 generic.go:334] "Generic (PLEG): container finished" podID="06c125eb-bc02-42b4-a1d5-523be036ab98" containerID="f173613b680e37ca72b7a12856353df35248c01b59b1fbe31f33ce9b726383d9" exitCode=0 Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.033287 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f959578df-vbwjf" event={"ID":"06c125eb-bc02-42b4-a1d5-523be036ab98","Type":"ContainerDied","Data":"f173613b680e37ca72b7a12856353df35248c01b59b1fbe31f33ce9b726383d9"} Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.033511 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f959578df-vbwjf" event={"ID":"06c125eb-bc02-42b4-a1d5-523be036ab98","Type":"ContainerStarted","Data":"f28d840f1e60f72f3aa434044c4f188abd62b402550c29c657939e17e50556cf"} Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.033666 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" podUID="76832cc7-62d9-4eee-b61e-d67891fa13de" containerName="dnsmasq-dns" containerID="cri-o://034f02aab24980d543ecc742e7322bd067f0f893e2b6b13940b1c8be38275092" gracePeriod=10 Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.491800 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.589695 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-ovsdbserver-nb\") pod \"76832cc7-62d9-4eee-b61e-d67891fa13de\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.589752 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-config\") pod \"76832cc7-62d9-4eee-b61e-d67891fa13de\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.589966 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsfkg\" (UniqueName: \"kubernetes.io/projected/76832cc7-62d9-4eee-b61e-d67891fa13de-kube-api-access-tsfkg\") pod \"76832cc7-62d9-4eee-b61e-d67891fa13de\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.590009 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-dns-svc\") pod \"76832cc7-62d9-4eee-b61e-d67891fa13de\" (UID: \"76832cc7-62d9-4eee-b61e-d67891fa13de\") " Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.607652 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76832cc7-62d9-4eee-b61e-d67891fa13de-kube-api-access-tsfkg" (OuterVolumeSpecName: "kube-api-access-tsfkg") pod "76832cc7-62d9-4eee-b61e-d67891fa13de" (UID: "76832cc7-62d9-4eee-b61e-d67891fa13de"). InnerVolumeSpecName "kube-api-access-tsfkg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.634486 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "76832cc7-62d9-4eee-b61e-d67891fa13de" (UID: "76832cc7-62d9-4eee-b61e-d67891fa13de"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.635438 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "76832cc7-62d9-4eee-b61e-d67891fa13de" (UID: "76832cc7-62d9-4eee-b61e-d67891fa13de"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.648065 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-config" (OuterVolumeSpecName: "config") pod "76832cc7-62d9-4eee-b61e-d67891fa13de" (UID: "76832cc7-62d9-4eee-b61e-d67891fa13de"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.693252 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsfkg\" (UniqueName: \"kubernetes.io/projected/76832cc7-62d9-4eee-b61e-d67891fa13de-kube-api-access-tsfkg\") on node \"crc\" DevicePath \"\"" Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.693325 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.693343 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 15:05:30 crc kubenswrapper[4783]: I0930 15:05:30.693358 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76832cc7-62d9-4eee-b61e-d67891fa13de-config\") on node \"crc\" DevicePath \"\"" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.046574 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f959578df-vbwjf" event={"ID":"06c125eb-bc02-42b4-a1d5-523be036ab98","Type":"ContainerStarted","Data":"9ff523181d1756d27d2a04f2b5b9c99774e22e8d67ae1a59e6943a9a571a93a0"} Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.046869 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.050786 4783 generic.go:334] "Generic (PLEG): container finished" podID="76832cc7-62d9-4eee-b61e-d67891fa13de" containerID="034f02aab24980d543ecc742e7322bd067f0f893e2b6b13940b1c8be38275092" exitCode=0 Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.050895 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" event={"ID":"76832cc7-62d9-4eee-b61e-d67891fa13de","Type":"ContainerDied","Data":"034f02aab24980d543ecc742e7322bd067f0f893e2b6b13940b1c8be38275092"} Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.050918 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.051104 4783 scope.go:117] "RemoveContainer" containerID="034f02aab24980d543ecc742e7322bd067f0f893e2b6b13940b1c8be38275092" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.051084 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-97cb7bf45-jhqhj" event={"ID":"76832cc7-62d9-4eee-b61e-d67891fa13de","Type":"ContainerDied","Data":"797fda22bdc8bd3e56f33c7ef8c7444536c6a5fa4d15939230e6fdf6d1cf5afb"} Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.077266 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f959578df-vbwjf" podStartSLOduration=3.077238185 podStartE2EDuration="3.077238185s" podCreationTimestamp="2025-09-30 15:05:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:05:31.068060511 +0000 UTC m=+5430.999526858" watchObservedRunningTime="2025-09-30 15:05:31.077238185 +0000 UTC m=+5431.008704502" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.097793 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-97cb7bf45-jhqhj"] Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.099900 4783 scope.go:117] "RemoveContainer" containerID="b1544fb82a55ec0f4a1b86979c7f5317af827164dcb120c0a20f138da20686c9" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.108084 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-97cb7bf45-jhqhj"] Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.129954 4783 scope.go:117] "RemoveContainer" containerID="034f02aab24980d543ecc742e7322bd067f0f893e2b6b13940b1c8be38275092" Sep 30 15:05:31 crc kubenswrapper[4783]: E0930 15:05:31.130857 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"034f02aab24980d543ecc742e7322bd067f0f893e2b6b13940b1c8be38275092\": container with ID starting with 034f02aab24980d543ecc742e7322bd067f0f893e2b6b13940b1c8be38275092 not found: ID does not exist" containerID="034f02aab24980d543ecc742e7322bd067f0f893e2b6b13940b1c8be38275092" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.130894 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"034f02aab24980d543ecc742e7322bd067f0f893e2b6b13940b1c8be38275092"} err="failed to get container status \"034f02aab24980d543ecc742e7322bd067f0f893e2b6b13940b1c8be38275092\": rpc error: code = NotFound desc = could not find container \"034f02aab24980d543ecc742e7322bd067f0f893e2b6b13940b1c8be38275092\": container with ID starting with 034f02aab24980d543ecc742e7322bd067f0f893e2b6b13940b1c8be38275092 not found: ID does not exist" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.130920 4783 scope.go:117] "RemoveContainer" containerID="b1544fb82a55ec0f4a1b86979c7f5317af827164dcb120c0a20f138da20686c9" Sep 30 15:05:31 crc kubenswrapper[4783]: E0930 15:05:31.131276 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1544fb82a55ec0f4a1b86979c7f5317af827164dcb120c0a20f138da20686c9\": container with ID starting with b1544fb82a55ec0f4a1b86979c7f5317af827164dcb120c0a20f138da20686c9 not found: ID does not exist" containerID="b1544fb82a55ec0f4a1b86979c7f5317af827164dcb120c0a20f138da20686c9" Sep 30 15:05:31 crc 
kubenswrapper[4783]: I0930 15:05:31.131300 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1544fb82a55ec0f4a1b86979c7f5317af827164dcb120c0a20f138da20686c9"} err="failed to get container status \"b1544fb82a55ec0f4a1b86979c7f5317af827164dcb120c0a20f138da20686c9\": rpc error: code = NotFound desc = could not find container \"b1544fb82a55ec0f4a1b86979c7f5317af827164dcb120c0a20f138da20686c9\": container with ID starting with b1544fb82a55ec0f4a1b86979c7f5317af827164dcb120c0a20f138da20686c9 not found: ID does not exist" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.352375 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"] Sep 30 15:05:31 crc kubenswrapper[4783]: E0930 15:05:31.352681 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76832cc7-62d9-4eee-b61e-d67891fa13de" containerName="dnsmasq-dns" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.352697 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="76832cc7-62d9-4eee-b61e-d67891fa13de" containerName="dnsmasq-dns" Sep 30 15:05:31 crc kubenswrapper[4783]: E0930 15:05:31.352712 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76832cc7-62d9-4eee-b61e-d67891fa13de" containerName="init" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.352721 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="76832cc7-62d9-4eee-b61e-d67891fa13de" containerName="init" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.352932 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="76832cc7-62d9-4eee-b61e-d67891fa13de" containerName="dnsmasq-dns" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.353487 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.355486 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.359150 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.505914 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/fd34392e-1155-4e74-994f-5b64a49ed9cb-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"fd34392e-1155-4e74-994f-5b64a49ed9cb\") " pod="openstack/ovn-copy-data" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.505986 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25hq4\" (UniqueName: \"kubernetes.io/projected/fd34392e-1155-4e74-994f-5b64a49ed9cb-kube-api-access-25hq4\") pod \"ovn-copy-data\" (UID: \"fd34392e-1155-4e74-994f-5b64a49ed9cb\") " pod="openstack/ovn-copy-data" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.506088 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-056c9da4-04f2-4837-b890-87a2c6f4fd8b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-056c9da4-04f2-4837-b890-87a2c6f4fd8b\") pod \"ovn-copy-data\" (UID: \"fd34392e-1155-4e74-994f-5b64a49ed9cb\") " pod="openstack/ovn-copy-data" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.608020 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/fd34392e-1155-4e74-994f-5b64a49ed9cb-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"fd34392e-1155-4e74-994f-5b64a49ed9cb\") " pod="openstack/ovn-copy-data" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.608092 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25hq4\" (UniqueName: \"kubernetes.io/projected/fd34392e-1155-4e74-994f-5b64a49ed9cb-kube-api-access-25hq4\") pod \"ovn-copy-data\" (UID: \"fd34392e-1155-4e74-994f-5b64a49ed9cb\") " pod="openstack/ovn-copy-data" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.608126 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-056c9da4-04f2-4837-b890-87a2c6f4fd8b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-056c9da4-04f2-4837-b890-87a2c6f4fd8b\") pod \"ovn-copy-data\" (UID: \"fd34392e-1155-4e74-994f-5b64a49ed9cb\") " pod="openstack/ovn-copy-data" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.611126 4783 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.611169 4783 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-056c9da4-04f2-4837-b890-87a2c6f4fd8b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-056c9da4-04f2-4837-b890-87a2c6f4fd8b\") pod \"ovn-copy-data\" (UID: \"fd34392e-1155-4e74-994f-5b64a49ed9cb\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9abd07467f101cb8d4c0b6e50732c3c41d18fc21d9643d68ec82735fc9f6dae7/globalmount\"" pod="openstack/ovn-copy-data" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.615176 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/fd34392e-1155-4e74-994f-5b64a49ed9cb-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"fd34392e-1155-4e74-994f-5b64a49ed9cb\") " pod="openstack/ovn-copy-data" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.627499 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25hq4\" (UniqueName: \"kubernetes.io/projected/fd34392e-1155-4e74-994f-5b64a49ed9cb-kube-api-access-25hq4\") pod \"ovn-copy-data\" (UID: \"fd34392e-1155-4e74-994f-5b64a49ed9cb\") " pod="openstack/ovn-copy-data" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.655728 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-056c9da4-04f2-4837-b890-87a2c6f4fd8b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-056c9da4-04f2-4837-b890-87a2c6f4fd8b\") pod \"ovn-copy-data\" (UID: \"fd34392e-1155-4e74-994f-5b64a49ed9cb\") " pod="openstack/ovn-copy-data" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.672993 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Sep 30 15:05:31 crc kubenswrapper[4783]: I0930 15:05:31.842989 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:05:31 crc kubenswrapper[4783]: E0930 15:05:31.843529 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:05:32 crc kubenswrapper[4783]: I0930 15:05:32.170799 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Sep 30 15:05:32 crc kubenswrapper[4783]: I0930 15:05:32.183176 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 15:05:32 crc kubenswrapper[4783]: I0930 15:05:32.852210 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76832cc7-62d9-4eee-b61e-d67891fa13de" path="/var/lib/kubelet/pods/76832cc7-62d9-4eee-b61e-d67891fa13de/volumes" Sep 30 15:05:33 crc kubenswrapper[4783]: I0930 15:05:33.073895 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"fd34392e-1155-4e74-994f-5b64a49ed9cb","Type":"ContainerStarted","Data":"0eb7531a1f3eadeadcc653f24e5b278209f65348105ca34d6331849c7295dadb"} Sep 30 15:05:33 crc kubenswrapper[4783]: I0930 15:05:33.073945 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"fd34392e-1155-4e74-994f-5b64a49ed9cb","Type":"ContainerStarted","Data":"100971b80656609c402bbd81e42d6f1fa0e882ce93336a0716db31c0a94dbfea"} Sep 30 15:05:33 crc kubenswrapper[4783]: I0930 15:05:33.091757 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=2.547647182 podStartE2EDuration="3.091735122s" podCreationTimestamp="2025-09-30 15:05:30 +0000 UTC" firstStartedPulling="2025-09-30 15:05:32.182892444 +0000 UTC m=+5432.114358771" lastFinishedPulling="2025-09-30 15:05:32.726980364 +0000 UTC m=+5432.658446711" observedRunningTime="2025-09-30 15:05:33.085514793 +0000 UTC m=+5433.016981100" watchObservedRunningTime="2025-09-30 15:05:33.091735122 +0000 UTC m=+5433.023201429" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.751142 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.752845 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.757156 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 30 15:05:37 crc kubenswrapper[4783]: W0930 15:05:37.757484 4783 reflector.go:561] object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-2bv5p": failed to list *v1.Secret: secrets "ovnnorthd-ovnnorthd-dockercfg-2bv5p" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Sep 30 15:05:37 crc kubenswrapper[4783]: E0930 15:05:37.757534 4783 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"ovnnorthd-ovnnorthd-dockercfg-2bv5p\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovnnorthd-ovnnorthd-dockercfg-2bv5p\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.757638 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.757924 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.820744 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.831842 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/05e03ae1-de6e-4542-a5db-62906d31ed81-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.831928 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e03ae1-de6e-4542-a5db-62906d31ed81-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.831997 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/05e03ae1-de6e-4542-a5db-62906d31ed81-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.832048 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05e03ae1-de6e-4542-a5db-62906d31ed81-config\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.832078 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05e03ae1-de6e-4542-a5db-62906d31ed81-scripts\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.832109 4783 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw9cp\" (UniqueName: \"kubernetes.io/projected/05e03ae1-de6e-4542-a5db-62906d31ed81-kube-api-access-qw9cp\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.832159 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/05e03ae1-de6e-4542-a5db-62906d31ed81-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.933478 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e03ae1-de6e-4542-a5db-62906d31ed81-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.933635 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/05e03ae1-de6e-4542-a5db-62906d31ed81-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.933753 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05e03ae1-de6e-4542-a5db-62906d31ed81-config\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.933782 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05e03ae1-de6e-4542-a5db-62906d31ed81-scripts\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.933829 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw9cp\" (UniqueName: \"kubernetes.io/projected/05e03ae1-de6e-4542-a5db-62906d31ed81-kube-api-access-qw9cp\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.933912 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/05e03ae1-de6e-4542-a5db-62906d31ed81-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.933964 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/05e03ae1-de6e-4542-a5db-62906d31ed81-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.935651 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/05e03ae1-de6e-4542-a5db-62906d31ed81-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc 
kubenswrapper[4783]: I0930 15:05:37.936050 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05e03ae1-de6e-4542-a5db-62906d31ed81-config\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.937060 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05e03ae1-de6e-4542-a5db-62906d31ed81-scripts\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.940158 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/05e03ae1-de6e-4542-a5db-62906d31ed81-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.941172 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/05e03ae1-de6e-4542-a5db-62906d31ed81-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.941370 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e03ae1-de6e-4542-a5db-62906d31ed81-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:37 crc kubenswrapper[4783]: I0930 15:05:37.955492 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw9cp\" (UniqueName: \"kubernetes.io/projected/05e03ae1-de6e-4542-a5db-62906d31ed81-kube-api-access-qw9cp\") pod \"ovn-northd-0\" (UID: \"05e03ae1-de6e-4542-a5db-62906d31ed81\") " pod="openstack/ovn-northd-0" Sep 30 15:05:38 crc kubenswrapper[4783]: I0930 15:05:38.779307 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-2bv5p" Sep 30 15:05:38 crc kubenswrapper[4783]: I0930 15:05:38.785919 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 15:05:39 crc kubenswrapper[4783]: I0930 15:05:39.261424 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 15:05:39 crc kubenswrapper[4783]: W0930 15:05:39.263305 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05e03ae1_de6e_4542_a5db_62906d31ed81.slice/crio-cff7e3470bfa53e31eb3cb026646d1d1ed57d61f228fdc2ec34e50bec3a21056 WatchSource:0}: Error finding container cff7e3470bfa53e31eb3cb026646d1d1ed57d61f228fdc2ec34e50bec3a21056: Status 404 returned error can't find the container with id cff7e3470bfa53e31eb3cb026646d1d1ed57d61f228fdc2ec34e50bec3a21056 Sep 30 15:05:39 crc kubenswrapper[4783]: I0930 15:05:39.344715 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:05:39 crc kubenswrapper[4783]: I0930 15:05:39.412086 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6885566dd9-qx49q"] Sep 30 15:05:39 crc kubenswrapper[4783]: I0930 15:05:39.412469 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6885566dd9-qx49q" podUID="6c0c4f74-bced-436a-b552-580e064c6d70" containerName="dnsmasq-dns" containerID="cri-o://1f47432459fe7ca9fafeb906f2809df372acab3eef117d060463f6dbafd28e98" gracePeriod=10 Sep 30 15:05:39 crc kubenswrapper[4783]: I0930 15:05:39.820743 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 15:05:39 crc kubenswrapper[4783]: I0930 15:05:39.968714 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c0c4f74-bced-436a-b552-580e064c6d70-config\") pod \"6c0c4f74-bced-436a-b552-580e064c6d70\" (UID: \"6c0c4f74-bced-436a-b552-580e064c6d70\") " Sep 30 15:05:39 crc kubenswrapper[4783]: I0930 15:05:39.968818 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f25mm\" (UniqueName: \"kubernetes.io/projected/6c0c4f74-bced-436a-b552-580e064c6d70-kube-api-access-f25mm\") pod \"6c0c4f74-bced-436a-b552-580e064c6d70\" (UID: \"6c0c4f74-bced-436a-b552-580e064c6d70\") " Sep 30 15:05:39 crc kubenswrapper[4783]: I0930 15:05:39.969018 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c0c4f74-bced-436a-b552-580e064c6d70-dns-svc\") pod \"6c0c4f74-bced-436a-b552-580e064c6d70\" (UID: \"6c0c4f74-bced-436a-b552-580e064c6d70\") " Sep 30 15:05:39 crc kubenswrapper[4783]: I0930 15:05:39.973057 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c0c4f74-bced-436a-b552-580e064c6d70-kube-api-access-f25mm" (OuterVolumeSpecName: "kube-api-access-f25mm") pod "6c0c4f74-bced-436a-b552-580e064c6d70" (UID: "6c0c4f74-bced-436a-b552-580e064c6d70"). InnerVolumeSpecName "kube-api-access-f25mm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.017084 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c0c4f74-bced-436a-b552-580e064c6d70-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6c0c4f74-bced-436a-b552-580e064c6d70" (UID: "6c0c4f74-bced-436a-b552-580e064c6d70"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.030708 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c0c4f74-bced-436a-b552-580e064c6d70-config" (OuterVolumeSpecName: "config") pod "6c0c4f74-bced-436a-b552-580e064c6d70" (UID: "6c0c4f74-bced-436a-b552-580e064c6d70"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.071343 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c0c4f74-bced-436a-b552-580e064c6d70-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.071396 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c0c4f74-bced-436a-b552-580e064c6d70-config\") on node \"crc\" DevicePath \"\"" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.071410 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f25mm\" (UniqueName: \"kubernetes.io/projected/6c0c4f74-bced-436a-b552-580e064c6d70-kube-api-access-f25mm\") on node \"crc\" DevicePath \"\"" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.130197 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"05e03ae1-de6e-4542-a5db-62906d31ed81","Type":"ContainerStarted","Data":"233bf428defa9f51940f2beffca851f80edf82eca708ce6c733bdc048daf7b99"} Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.130422 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"05e03ae1-de6e-4542-a5db-62906d31ed81","Type":"ContainerStarted","Data":"3e995a2b882ffbdbc0328cc93f645797710024930e0ec2fc710a16327fea4ebc"} Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.130497 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"05e03ae1-de6e-4542-a5db-62906d31ed81","Type":"ContainerStarted","Data":"cff7e3470bfa53e31eb3cb026646d1d1ed57d61f228fdc2ec34e50bec3a21056"} Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.130821 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.134286 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6885566dd9-qx49q" event={"ID":"6c0c4f74-bced-436a-b552-580e064c6d70","Type":"ContainerDied","Data":"1f47432459fe7ca9fafeb906f2809df372acab3eef117d060463f6dbafd28e98"} Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.134341 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6885566dd9-qx49q" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.134355 4783 scope.go:117] "RemoveContainer" containerID="1f47432459fe7ca9fafeb906f2809df372acab3eef117d060463f6dbafd28e98" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.134211 4783 generic.go:334] "Generic (PLEG): container finished" podID="6c0c4f74-bced-436a-b552-580e064c6d70" containerID="1f47432459fe7ca9fafeb906f2809df372acab3eef117d060463f6dbafd28e98" exitCode=0 Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.135377 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6885566dd9-qx49q" event={"ID":"6c0c4f74-bced-436a-b552-580e064c6d70","Type":"ContainerDied","Data":"c98a968c7d5ccac70b55aa886d492ade94fce2e1b482d778506e919e1970b057"} Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.165638 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.165607721 podStartE2EDuration="3.165607721s" podCreationTimestamp="2025-09-30 15:05:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:05:40.152820742 +0000 UTC m=+5440.084287099" watchObservedRunningTime="2025-09-30 15:05:40.165607721 +0000 UTC m=+5440.097074048" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.170073 4783 scope.go:117] "RemoveContainer" containerID="5bcf7625dc9e90f263d2c6a227eb1e708333e923471df314f1c2b6bf44db6815" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.183184 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6885566dd9-qx49q"] Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.189590 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6885566dd9-qx49q"] Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.202628 4783 scope.go:117] "RemoveContainer" containerID="1f47432459fe7ca9fafeb906f2809df372acab3eef117d060463f6dbafd28e98" Sep 30 15:05:40 crc kubenswrapper[4783]: E0930 15:05:40.203067 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f47432459fe7ca9fafeb906f2809df372acab3eef117d060463f6dbafd28e98\": container with ID starting with 1f47432459fe7ca9fafeb906f2809df372acab3eef117d060463f6dbafd28e98 not found: ID does not exist" containerID="1f47432459fe7ca9fafeb906f2809df372acab3eef117d060463f6dbafd28e98" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.203098 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f47432459fe7ca9fafeb906f2809df372acab3eef117d060463f6dbafd28e98"} err="failed to get container status \"1f47432459fe7ca9fafeb906f2809df372acab3eef117d060463f6dbafd28e98\": rpc error: code = NotFound desc = could not find container \"1f47432459fe7ca9fafeb906f2809df372acab3eef117d060463f6dbafd28e98\": container with ID starting with 1f47432459fe7ca9fafeb906f2809df372acab3eef117d060463f6dbafd28e98 not found: ID does not exist" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.203119 4783 scope.go:117] "RemoveContainer" containerID="5bcf7625dc9e90f263d2c6a227eb1e708333e923471df314f1c2b6bf44db6815" Sep 30 15:05:40 crc kubenswrapper[4783]: E0930 15:05:40.203428 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bcf7625dc9e90f263d2c6a227eb1e708333e923471df314f1c2b6bf44db6815\": 
container with ID starting with 5bcf7625dc9e90f263d2c6a227eb1e708333e923471df314f1c2b6bf44db6815 not found: ID does not exist" containerID="5bcf7625dc9e90f263d2c6a227eb1e708333e923471df314f1c2b6bf44db6815" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.203550 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bcf7625dc9e90f263d2c6a227eb1e708333e923471df314f1c2b6bf44db6815"} err="failed to get container status \"5bcf7625dc9e90f263d2c6a227eb1e708333e923471df314f1c2b6bf44db6815\": rpc error: code = NotFound desc = could not find container \"5bcf7625dc9e90f263d2c6a227eb1e708333e923471df314f1c2b6bf44db6815\": container with ID starting with 5bcf7625dc9e90f263d2c6a227eb1e708333e923471df314f1c2b6bf44db6815 not found: ID does not exist" Sep 30 15:05:40 crc kubenswrapper[4783]: I0930 15:05:40.853957 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c0c4f74-bced-436a-b552-580e064c6d70" path="/var/lib/kubelet/pods/6c0c4f74-bced-436a-b552-580e064c6d70/volumes" Sep 30 15:05:42 crc kubenswrapper[4783]: I0930 15:05:42.843575 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:05:43 crc kubenswrapper[4783]: I0930 15:05:43.008489 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-fc8rm"] Sep 30 15:05:43 crc kubenswrapper[4783]: E0930 15:05:43.009338 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c0c4f74-bced-436a-b552-580e064c6d70" containerName="dnsmasq-dns" Sep 30 15:05:43 crc kubenswrapper[4783]: I0930 15:05:43.009363 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c0c4f74-bced-436a-b552-580e064c6d70" containerName="dnsmasq-dns" Sep 30 15:05:43 crc kubenswrapper[4783]: E0930 15:05:43.009387 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c0c4f74-bced-436a-b552-580e064c6d70" containerName="init" Sep 30 15:05:43 crc kubenswrapper[4783]: I0930 15:05:43.009397 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c0c4f74-bced-436a-b552-580e064c6d70" containerName="init" Sep 30 15:05:43 crc kubenswrapper[4783]: I0930 15:05:43.009624 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c0c4f74-bced-436a-b552-580e064c6d70" containerName="dnsmasq-dns" Sep 30 15:05:43 crc kubenswrapper[4783]: I0930 15:05:43.010387 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-fc8rm" Sep 30 15:05:43 crc kubenswrapper[4783]: I0930 15:05:43.017578 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-fc8rm"] Sep 30 15:05:43 crc kubenswrapper[4783]: I0930 15:05:43.121205 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmmtt\" (UniqueName: \"kubernetes.io/projected/3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa-kube-api-access-fmmtt\") pod \"keystone-db-create-fc8rm\" (UID: \"3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa\") " pod="openstack/keystone-db-create-fc8rm" Sep 30 15:05:43 crc kubenswrapper[4783]: I0930 15:05:43.160502 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"02fca664b73840f45075e91eaba0fd3e357ad5132d18118ff03f8a09e061e7d8"} Sep 30 15:05:43 crc kubenswrapper[4783]: I0930 15:05:43.223470 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmmtt\" (UniqueName: \"kubernetes.io/projected/3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa-kube-api-access-fmmtt\") pod \"keystone-db-create-fc8rm\" (UID: \"3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa\") " pod="openstack/keystone-db-create-fc8rm" Sep 30 15:05:43 crc kubenswrapper[4783]: I0930 15:05:43.244643 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmmtt\" (UniqueName: \"kubernetes.io/projected/3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa-kube-api-access-fmmtt\") pod \"keystone-db-create-fc8rm\" (UID: \"3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa\") " pod="openstack/keystone-db-create-fc8rm" Sep 30 15:05:43 crc kubenswrapper[4783]: I0930 15:05:43.331702 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fc8rm" Sep 30 15:05:43 crc kubenswrapper[4783]: I0930 15:05:43.759912 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-fc8rm"] Sep 30 15:05:44 crc kubenswrapper[4783]: I0930 15:05:44.174540 4783 generic.go:334] "Generic (PLEG): container finished" podID="3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa" containerID="f88926f9b782c653bb5c6a6c84b79893bade5bbf5f1ad01c4eea43655bf385fe" exitCode=0 Sep 30 15:05:44 crc kubenswrapper[4783]: I0930 15:05:44.174800 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fc8rm" event={"ID":"3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa","Type":"ContainerDied","Data":"f88926f9b782c653bb5c6a6c84b79893bade5bbf5f1ad01c4eea43655bf385fe"} Sep 30 15:05:44 crc kubenswrapper[4783]: I0930 15:05:44.174830 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fc8rm" event={"ID":"3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa","Type":"ContainerStarted","Data":"dc3764ea1e8dfe5b34225781edf4d98435d307171587ce3a5e39f1c72a46a60a"} Sep 30 15:05:45 crc kubenswrapper[4783]: I0930 15:05:45.572716 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-fc8rm" Sep 30 15:05:45 crc kubenswrapper[4783]: I0930 15:05:45.664951 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmmtt\" (UniqueName: \"kubernetes.io/projected/3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa-kube-api-access-fmmtt\") pod \"3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa\" (UID: \"3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa\") " Sep 30 15:05:45 crc kubenswrapper[4783]: I0930 15:05:45.670357 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa-kube-api-access-fmmtt" (OuterVolumeSpecName: "kube-api-access-fmmtt") pod "3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa" (UID: "3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa"). InnerVolumeSpecName "kube-api-access-fmmtt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:05:45 crc kubenswrapper[4783]: I0930 15:05:45.766748 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmmtt\" (UniqueName: \"kubernetes.io/projected/3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa-kube-api-access-fmmtt\") on node \"crc\" DevicePath \"\"" Sep 30 15:05:46 crc kubenswrapper[4783]: I0930 15:05:46.195128 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fc8rm" event={"ID":"3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa","Type":"ContainerDied","Data":"dc3764ea1e8dfe5b34225781edf4d98435d307171587ce3a5e39f1c72a46a60a"} Sep 30 15:05:46 crc kubenswrapper[4783]: I0930 15:05:46.195168 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc3764ea1e8dfe5b34225781edf4d98435d307171587ce3a5e39f1c72a46a60a" Sep 30 15:05:46 crc kubenswrapper[4783]: I0930 15:05:46.195202 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fc8rm" Sep 30 15:05:48 crc kubenswrapper[4783]: I0930 15:05:48.855867 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-snhkr"] Sep 30 15:05:48 crc kubenswrapper[4783]: E0930 15:05:48.856718 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa" containerName="mariadb-database-create" Sep 30 15:05:48 crc kubenswrapper[4783]: I0930 15:05:48.856732 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa" containerName="mariadb-database-create" Sep 30 15:05:48 crc kubenswrapper[4783]: I0930 15:05:48.856888 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa" containerName="mariadb-database-create" Sep 30 15:05:48 crc kubenswrapper[4783]: I0930 15:05:48.858293 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:05:48 crc kubenswrapper[4783]: I0930 15:05:48.917656 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-snhkr"] Sep 30 15:05:49 crc kubenswrapper[4783]: I0930 15:05:49.018668 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3825052e-49a3-48c6-aff5-7b30d755ff3f-catalog-content\") pod \"community-operators-snhkr\" (UID: \"3825052e-49a3-48c6-aff5-7b30d755ff3f\") " pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:05:49 crc kubenswrapper[4783]: I0930 15:05:49.018734 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3825052e-49a3-48c6-aff5-7b30d755ff3f-utilities\") pod \"community-operators-snhkr\" (UID: \"3825052e-49a3-48c6-aff5-7b30d755ff3f\") " pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:05:49 crc kubenswrapper[4783]: I0930 15:05:49.018761 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w696j\" (UniqueName: \"kubernetes.io/projected/3825052e-49a3-48c6-aff5-7b30d755ff3f-kube-api-access-w696j\") pod \"community-operators-snhkr\" (UID: \"3825052e-49a3-48c6-aff5-7b30d755ff3f\") " pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:05:49 crc kubenswrapper[4783]: I0930 15:05:49.119823 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3825052e-49a3-48c6-aff5-7b30d755ff3f-utilities\") pod \"community-operators-snhkr\" (UID: \"3825052e-49a3-48c6-aff5-7b30d755ff3f\") " pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:05:49 crc kubenswrapper[4783]: I0930 15:05:49.119878 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w696j\" (UniqueName: \"kubernetes.io/projected/3825052e-49a3-48c6-aff5-7b30d755ff3f-kube-api-access-w696j\") pod \"community-operators-snhkr\" (UID: \"3825052e-49a3-48c6-aff5-7b30d755ff3f\") " pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:05:49 crc kubenswrapper[4783]: I0930 15:05:49.120000 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3825052e-49a3-48c6-aff5-7b30d755ff3f-catalog-content\") pod \"community-operators-snhkr\" (UID: \"3825052e-49a3-48c6-aff5-7b30d755ff3f\") " pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:05:49 crc kubenswrapper[4783]: I0930 15:05:49.120590 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3825052e-49a3-48c6-aff5-7b30d755ff3f-catalog-content\") pod \"community-operators-snhkr\" (UID: \"3825052e-49a3-48c6-aff5-7b30d755ff3f\") " pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:05:49 crc kubenswrapper[4783]: I0930 15:05:49.120591 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3825052e-49a3-48c6-aff5-7b30d755ff3f-utilities\") pod \"community-operators-snhkr\" (UID: \"3825052e-49a3-48c6-aff5-7b30d755ff3f\") " pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:05:49 crc kubenswrapper[4783]: I0930 15:05:49.142103 4783 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-w696j\" (UniqueName: \"kubernetes.io/projected/3825052e-49a3-48c6-aff5-7b30d755ff3f-kube-api-access-w696j\") pod \"community-operators-snhkr\" (UID: \"3825052e-49a3-48c6-aff5-7b30d755ff3f\") " pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:05:49 crc kubenswrapper[4783]: I0930 15:05:49.221661 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:05:49 crc kubenswrapper[4783]: I0930 15:05:49.719955 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-snhkr"] Sep 30 15:05:50 crc kubenswrapper[4783]: I0930 15:05:50.228485 4783 generic.go:334] "Generic (PLEG): container finished" podID="3825052e-49a3-48c6-aff5-7b30d755ff3f" containerID="aebdd8218863ebb2c057f21c3b83d0a4a82e771db6a3d4df3bd6141736f16f63" exitCode=0 Sep 30 15:05:50 crc kubenswrapper[4783]: I0930 15:05:50.228559 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-snhkr" event={"ID":"3825052e-49a3-48c6-aff5-7b30d755ff3f","Type":"ContainerDied","Data":"aebdd8218863ebb2c057f21c3b83d0a4a82e771db6a3d4df3bd6141736f16f63"} Sep 30 15:05:50 crc kubenswrapper[4783]: I0930 15:05:50.228785 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-snhkr" event={"ID":"3825052e-49a3-48c6-aff5-7b30d755ff3f","Type":"ContainerStarted","Data":"4793cf2c465e400e087056fb77c3f2835c2fa672ba02b0e2833cb6fdb2e26a71"} Sep 30 15:05:53 crc kubenswrapper[4783]: I0930 15:05:53.025112 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-1499-account-create-fd5zk"] Sep 30 15:05:53 crc kubenswrapper[4783]: I0930 15:05:53.026998 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-1499-account-create-fd5zk" Sep 30 15:05:53 crc kubenswrapper[4783]: I0930 15:05:53.032151 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 30 15:05:53 crc kubenswrapper[4783]: I0930 15:05:53.042675 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-1499-account-create-fd5zk"] Sep 30 15:05:53 crc kubenswrapper[4783]: I0930 15:05:53.094626 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqjjg\" (UniqueName: \"kubernetes.io/projected/b70c4ecb-466c-40e1-bf6a-dc15b77cbb21-kube-api-access-lqjjg\") pod \"keystone-1499-account-create-fd5zk\" (UID: \"b70c4ecb-466c-40e1-bf6a-dc15b77cbb21\") " pod="openstack/keystone-1499-account-create-fd5zk" Sep 30 15:05:53 crc kubenswrapper[4783]: I0930 15:05:53.196020 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqjjg\" (UniqueName: \"kubernetes.io/projected/b70c4ecb-466c-40e1-bf6a-dc15b77cbb21-kube-api-access-lqjjg\") pod \"keystone-1499-account-create-fd5zk\" (UID: \"b70c4ecb-466c-40e1-bf6a-dc15b77cbb21\") " pod="openstack/keystone-1499-account-create-fd5zk" Sep 30 15:05:53 crc kubenswrapper[4783]: I0930 15:05:53.217667 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqjjg\" (UniqueName: \"kubernetes.io/projected/b70c4ecb-466c-40e1-bf6a-dc15b77cbb21-kube-api-access-lqjjg\") pod \"keystone-1499-account-create-fd5zk\" (UID: \"b70c4ecb-466c-40e1-bf6a-dc15b77cbb21\") " pod="openstack/keystone-1499-account-create-fd5zk" Sep 30 15:05:53 crc kubenswrapper[4783]: I0930 15:05:53.365127 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-1499-account-create-fd5zk" Sep 30 15:05:53 crc kubenswrapper[4783]: I0930 15:05:53.835699 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 30 15:05:53 crc kubenswrapper[4783]: I0930 15:05:53.890829 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-1499-account-create-fd5zk"] Sep 30 15:05:54 crc kubenswrapper[4783]: I0930 15:05:54.269583 4783 generic.go:334] "Generic (PLEG): container finished" podID="b70c4ecb-466c-40e1-bf6a-dc15b77cbb21" containerID="874c10422a47d9027b8bb8042b3e41b2a21770d27e3a542121038f552f228e01" exitCode=0 Sep 30 15:05:54 crc kubenswrapper[4783]: I0930 15:05:54.269688 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-1499-account-create-fd5zk" event={"ID":"b70c4ecb-466c-40e1-bf6a-dc15b77cbb21","Type":"ContainerDied","Data":"874c10422a47d9027b8bb8042b3e41b2a21770d27e3a542121038f552f228e01"} Sep 30 15:05:54 crc kubenswrapper[4783]: I0930 15:05:54.269746 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-1499-account-create-fd5zk" event={"ID":"b70c4ecb-466c-40e1-bf6a-dc15b77cbb21","Type":"ContainerStarted","Data":"79dd6aacabc96b4e01da86a32ca325a15ecc18b39aacb38e7466ef595a19c961"} Sep 30 15:05:54 crc kubenswrapper[4783]: I0930 15:05:54.273799 4783 generic.go:334] "Generic (PLEG): container finished" podID="3825052e-49a3-48c6-aff5-7b30d755ff3f" containerID="c84e0c5645edae96d2290ef6185f8e6746ced69ad9fe8952a85deb0f3a1d7634" exitCode=0 Sep 30 15:05:54 crc kubenswrapper[4783]: I0930 15:05:54.273840 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-snhkr" event={"ID":"3825052e-49a3-48c6-aff5-7b30d755ff3f","Type":"ContainerDied","Data":"c84e0c5645edae96d2290ef6185f8e6746ced69ad9fe8952a85deb0f3a1d7634"} Sep 30 15:05:55 crc kubenswrapper[4783]: I0930 15:05:55.601803 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1499-account-create-fd5zk" Sep 30 15:05:55 crc kubenswrapper[4783]: I0930 15:05:55.740408 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqjjg\" (UniqueName: \"kubernetes.io/projected/b70c4ecb-466c-40e1-bf6a-dc15b77cbb21-kube-api-access-lqjjg\") pod \"b70c4ecb-466c-40e1-bf6a-dc15b77cbb21\" (UID: \"b70c4ecb-466c-40e1-bf6a-dc15b77cbb21\") " Sep 30 15:05:55 crc kubenswrapper[4783]: I0930 15:05:55.746463 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b70c4ecb-466c-40e1-bf6a-dc15b77cbb21-kube-api-access-lqjjg" (OuterVolumeSpecName: "kube-api-access-lqjjg") pod "b70c4ecb-466c-40e1-bf6a-dc15b77cbb21" (UID: "b70c4ecb-466c-40e1-bf6a-dc15b77cbb21"). InnerVolumeSpecName "kube-api-access-lqjjg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:05:55 crc kubenswrapper[4783]: I0930 15:05:55.842548 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqjjg\" (UniqueName: \"kubernetes.io/projected/b70c4ecb-466c-40e1-bf6a-dc15b77cbb21-kube-api-access-lqjjg\") on node \"crc\" DevicePath \"\"" Sep 30 15:05:56 crc kubenswrapper[4783]: I0930 15:05:56.288164 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-1499-account-create-fd5zk" event={"ID":"b70c4ecb-466c-40e1-bf6a-dc15b77cbb21","Type":"ContainerDied","Data":"79dd6aacabc96b4e01da86a32ca325a15ecc18b39aacb38e7466ef595a19c961"} Sep 30 15:05:56 crc kubenswrapper[4783]: I0930 15:05:56.288209 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79dd6aacabc96b4e01da86a32ca325a15ecc18b39aacb38e7466ef595a19c961" Sep 30 15:05:56 crc kubenswrapper[4783]: I0930 15:05:56.288180 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1499-account-create-fd5zk" Sep 30 15:05:56 crc kubenswrapper[4783]: I0930 15:05:56.290632 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-snhkr" event={"ID":"3825052e-49a3-48c6-aff5-7b30d755ff3f","Type":"ContainerStarted","Data":"3d1c702fc7c8bbbbbb6f29d069f07d90976de49fc9e8cefa48488486a3ade955"} Sep 30 15:05:56 crc kubenswrapper[4783]: I0930 15:05:56.320173 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-snhkr" podStartSLOduration=3.418527671 podStartE2EDuration="8.320146024s" podCreationTimestamp="2025-09-30 15:05:48 +0000 UTC" firstStartedPulling="2025-09-30 15:05:50.230126302 +0000 UTC m=+5450.161592609" lastFinishedPulling="2025-09-30 15:05:55.131744655 +0000 UTC m=+5455.063210962" observedRunningTime="2025-09-30 15:05:56.314442772 +0000 UTC m=+5456.245909089" watchObservedRunningTime="2025-09-30 15:05:56.320146024 +0000 UTC m=+5456.251612331" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.506938 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-p7bgs"] Sep 30 15:05:58 crc kubenswrapper[4783]: E0930 15:05:58.507560 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b70c4ecb-466c-40e1-bf6a-dc15b77cbb21" containerName="mariadb-account-create" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.507575 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="b70c4ecb-466c-40e1-bf6a-dc15b77cbb21" containerName="mariadb-account-create" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.507726 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="b70c4ecb-466c-40e1-bf6a-dc15b77cbb21" containerName="mariadb-account-create" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.508348 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-p7bgs" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.510688 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.510873 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.511161 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.511202 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ndvm5" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.561994 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-p7bgs"] Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.582311 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-combined-ca-bundle\") pod \"keystone-db-sync-p7bgs\" (UID: \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\") " pod="openstack/keystone-db-sync-p7bgs" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.582402 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zsvn\" (UniqueName: \"kubernetes.io/projected/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-kube-api-access-4zsvn\") pod \"keystone-db-sync-p7bgs\" (UID: \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\") " pod="openstack/keystone-db-sync-p7bgs" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.582460 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-config-data\") pod \"keystone-db-sync-p7bgs\" (UID: \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\") " pod="openstack/keystone-db-sync-p7bgs" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.684250 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-config-data\") pod \"keystone-db-sync-p7bgs\" (UID: \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\") " pod="openstack/keystone-db-sync-p7bgs" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.684406 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-combined-ca-bundle\") pod \"keystone-db-sync-p7bgs\" (UID: \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\") " pod="openstack/keystone-db-sync-p7bgs" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.684453 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zsvn\" (UniqueName: \"kubernetes.io/projected/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-kube-api-access-4zsvn\") pod \"keystone-db-sync-p7bgs\" (UID: \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\") " pod="openstack/keystone-db-sync-p7bgs" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.689327 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-config-data\") pod \"keystone-db-sync-p7bgs\" (UID: \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\") " 
pod="openstack/keystone-db-sync-p7bgs" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.690367 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-combined-ca-bundle\") pod \"keystone-db-sync-p7bgs\" (UID: \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\") " pod="openstack/keystone-db-sync-p7bgs" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.702944 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zsvn\" (UniqueName: \"kubernetes.io/projected/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-kube-api-access-4zsvn\") pod \"keystone-db-sync-p7bgs\" (UID: \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\") " pod="openstack/keystone-db-sync-p7bgs" Sep 30 15:05:58 crc kubenswrapper[4783]: I0930 15:05:58.827893 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-p7bgs" Sep 30 15:05:59 crc kubenswrapper[4783]: I0930 15:05:59.223081 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:05:59 crc kubenswrapper[4783]: I0930 15:05:59.223404 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:05:59 crc kubenswrapper[4783]: I0930 15:05:59.260811 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-p7bgs"] Sep 30 15:05:59 crc kubenswrapper[4783]: W0930 15:05:59.268596 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e1012e0_e669_4a7a_bfc4_23729f8aacc4.slice/crio-66b1c6822a981b14f238090cc696f6d3a9eec35c23a4dcea679dc28fe2d269ec WatchSource:0}: Error finding container 66b1c6822a981b14f238090cc696f6d3a9eec35c23a4dcea679dc28fe2d269ec: Status 404 returned error can't find the container with id 66b1c6822a981b14f238090cc696f6d3a9eec35c23a4dcea679dc28fe2d269ec Sep 30 15:05:59 crc kubenswrapper[4783]: I0930 15:05:59.284524 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:05:59 crc kubenswrapper[4783]: I0930 15:05:59.320534 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-p7bgs" event={"ID":"0e1012e0-e669-4a7a-bfc4-23729f8aacc4","Type":"ContainerStarted","Data":"66b1c6822a981b14f238090cc696f6d3a9eec35c23a4dcea679dc28fe2d269ec"} Sep 30 15:06:00 crc kubenswrapper[4783]: I0930 15:06:00.331987 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-p7bgs" event={"ID":"0e1012e0-e669-4a7a-bfc4-23729f8aacc4","Type":"ContainerStarted","Data":"6ffb85b94ffc170ab97cdd89ee0fb611e2cc83d08251a412198d0f65d4d46e51"} Sep 30 15:06:00 crc kubenswrapper[4783]: I0930 15:06:00.352864 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-p7bgs" podStartSLOduration=2.352836666 podStartE2EDuration="2.352836666s" podCreationTimestamp="2025-09-30 15:05:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:06:00.348980603 +0000 UTC m=+5460.280446920" watchObservedRunningTime="2025-09-30 15:06:00.352836666 +0000 UTC m=+5460.284302983" Sep 30 15:06:01 crc kubenswrapper[4783]: I0930 15:06:01.343524 4783 generic.go:334] "Generic (PLEG): container finished" 
podID="0e1012e0-e669-4a7a-bfc4-23729f8aacc4" containerID="6ffb85b94ffc170ab97cdd89ee0fb611e2cc83d08251a412198d0f65d4d46e51" exitCode=0 Sep 30 15:06:01 crc kubenswrapper[4783]: I0930 15:06:01.343589 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-p7bgs" event={"ID":"0e1012e0-e669-4a7a-bfc4-23729f8aacc4","Type":"ContainerDied","Data":"6ffb85b94ffc170ab97cdd89ee0fb611e2cc83d08251a412198d0f65d4d46e51"} Sep 30 15:06:02 crc kubenswrapper[4783]: I0930 15:06:02.767004 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-p7bgs" Sep 30 15:06:02 crc kubenswrapper[4783]: I0930 15:06:02.868984 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-combined-ca-bundle\") pod \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\" (UID: \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\") " Sep 30 15:06:02 crc kubenswrapper[4783]: I0930 15:06:02.869085 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zsvn\" (UniqueName: \"kubernetes.io/projected/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-kube-api-access-4zsvn\") pod \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\" (UID: \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\") " Sep 30 15:06:02 crc kubenswrapper[4783]: I0930 15:06:02.869152 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-config-data\") pod \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\" (UID: \"0e1012e0-e669-4a7a-bfc4-23729f8aacc4\") " Sep 30 15:06:02 crc kubenswrapper[4783]: I0930 15:06:02.876390 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-kube-api-access-4zsvn" (OuterVolumeSpecName: "kube-api-access-4zsvn") pod "0e1012e0-e669-4a7a-bfc4-23729f8aacc4" (UID: "0e1012e0-e669-4a7a-bfc4-23729f8aacc4"). InnerVolumeSpecName "kube-api-access-4zsvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:06:02 crc kubenswrapper[4783]: I0930 15:06:02.894409 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e1012e0-e669-4a7a-bfc4-23729f8aacc4" (UID: "0e1012e0-e669-4a7a-bfc4-23729f8aacc4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 15:06:02 crc kubenswrapper[4783]: I0930 15:06:02.916506 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-config-data" (OuterVolumeSpecName: "config-data") pod "0e1012e0-e669-4a7a-bfc4-23729f8aacc4" (UID: "0e1012e0-e669-4a7a-bfc4-23729f8aacc4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 15:06:02 crc kubenswrapper[4783]: I0930 15:06:02.971550 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:02 crc kubenswrapper[4783]: I0930 15:06:02.971603 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:02 crc kubenswrapper[4783]: I0930 15:06:02.971615 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zsvn\" (UniqueName: \"kubernetes.io/projected/0e1012e0-e669-4a7a-bfc4-23729f8aacc4-kube-api-access-4zsvn\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.364103 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-p7bgs" event={"ID":"0e1012e0-e669-4a7a-bfc4-23729f8aacc4","Type":"ContainerDied","Data":"66b1c6822a981b14f238090cc696f6d3a9eec35c23a4dcea679dc28fe2d269ec"} Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.364175 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66b1c6822a981b14f238090cc696f6d3a9eec35c23a4dcea679dc28fe2d269ec" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.364614 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-p7bgs" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.585121 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-69d4477d9-4wxtw"] Sep 30 15:06:03 crc kubenswrapper[4783]: E0930 15:06:03.585490 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e1012e0-e669-4a7a-bfc4-23729f8aacc4" containerName="keystone-db-sync" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.585507 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e1012e0-e669-4a7a-bfc4-23729f8aacc4" containerName="keystone-db-sync" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.585708 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e1012e0-e669-4a7a-bfc4-23729f8aacc4" containerName="keystone-db-sync" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.586569 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.660280 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69d4477d9-4wxtw"] Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.669834 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-lzrpk"] Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.674327 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.677672 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.677958 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.678321 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.678547 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ndvm5" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.684117 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbkhm\" (UniqueName: \"kubernetes.io/projected/07392d39-a347-4741-9c38-6af20540ee65-kube-api-access-wbkhm\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" (UID: \"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.684172 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07392d39-a347-4741-9c38-6af20540ee65-config\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" (UID: \"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.684271 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07392d39-a347-4741-9c38-6af20540ee65-dns-svc\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" (UID: \"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.684295 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07392d39-a347-4741-9c38-6af20540ee65-ovsdbserver-sb\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" (UID: \"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.684321 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07392d39-a347-4741-9c38-6af20540ee65-ovsdbserver-nb\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" (UID: \"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.691806 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lzrpk"] Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.786109 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07392d39-a347-4741-9c38-6af20540ee65-config\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" (UID: \"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.786253 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07392d39-a347-4741-9c38-6af20540ee65-dns-svc\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" (UID: 
\"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.786287 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dxcc\" (UniqueName: \"kubernetes.io/projected/839ffd50-f515-4777-a674-29cb6d90fb2e-kube-api-access-5dxcc\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.786311 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-combined-ca-bundle\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.786336 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07392d39-a347-4741-9c38-6af20540ee65-ovsdbserver-sb\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" (UID: \"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.786466 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-credential-keys\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.786523 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07392d39-a347-4741-9c38-6af20540ee65-ovsdbserver-nb\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" (UID: \"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.786552 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-config-data\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.786604 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-scripts\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.786707 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-fernet-keys\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.786835 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbkhm\" (UniqueName: \"kubernetes.io/projected/07392d39-a347-4741-9c38-6af20540ee65-kube-api-access-wbkhm\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" 
(UID: \"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.787310 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07392d39-a347-4741-9c38-6af20540ee65-config\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" (UID: \"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.787364 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07392d39-a347-4741-9c38-6af20540ee65-ovsdbserver-sb\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" (UID: \"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.787425 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07392d39-a347-4741-9c38-6af20540ee65-ovsdbserver-nb\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" (UID: \"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.787923 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07392d39-a347-4741-9c38-6af20540ee65-dns-svc\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" (UID: \"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.818303 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbkhm\" (UniqueName: \"kubernetes.io/projected/07392d39-a347-4741-9c38-6af20540ee65-kube-api-access-wbkhm\") pod \"dnsmasq-dns-69d4477d9-4wxtw\" (UID: \"07392d39-a347-4741-9c38-6af20540ee65\") " pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.888397 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-fernet-keys\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.888573 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dxcc\" (UniqueName: \"kubernetes.io/projected/839ffd50-f515-4777-a674-29cb6d90fb2e-kube-api-access-5dxcc\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.888594 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-combined-ca-bundle\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.888614 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-credential-keys\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 
15:06:03.888633 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-config-data\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.888656 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-scripts\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.891826 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-credential-keys\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.892365 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-combined-ca-bundle\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.892415 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-fernet-keys\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.894309 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-config-data\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.896017 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-scripts\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.909270 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:03 crc kubenswrapper[4783]: I0930 15:06:03.909632 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dxcc\" (UniqueName: \"kubernetes.io/projected/839ffd50-f515-4777-a674-29cb6d90fb2e-kube-api-access-5dxcc\") pod \"keystone-bootstrap-lzrpk\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:04 crc kubenswrapper[4783]: I0930 15:06:04.006799 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:04 crc kubenswrapper[4783]: I0930 15:06:04.343652 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69d4477d9-4wxtw"] Sep 30 15:06:04 crc kubenswrapper[4783]: W0930 15:06:04.347211 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07392d39_a347_4741_9c38_6af20540ee65.slice/crio-40a21f2d8a210d5b1218a8f3bfed6d2d52a922d00c3fdbf1209551138de92b53 WatchSource:0}: Error finding container 40a21f2d8a210d5b1218a8f3bfed6d2d52a922d00c3fdbf1209551138de92b53: Status 404 returned error can't find the container with id 40a21f2d8a210d5b1218a8f3bfed6d2d52a922d00c3fdbf1209551138de92b53 Sep 30 15:06:04 crc kubenswrapper[4783]: I0930 15:06:04.371473 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" event={"ID":"07392d39-a347-4741-9c38-6af20540ee65","Type":"ContainerStarted","Data":"40a21f2d8a210d5b1218a8f3bfed6d2d52a922d00c3fdbf1209551138de92b53"} Sep 30 15:06:04 crc kubenswrapper[4783]: I0930 15:06:04.481056 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lzrpk"] Sep 30 15:06:04 crc kubenswrapper[4783]: W0930 15:06:04.483070 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod839ffd50_f515_4777_a674_29cb6d90fb2e.slice/crio-cd90f23dacc5a0e7206c3eec5ce625370f13daea6fdad91f84363ae914be4cb2 WatchSource:0}: Error finding container cd90f23dacc5a0e7206c3eec5ce625370f13daea6fdad91f84363ae914be4cb2: Status 404 returned error can't find the container with id cd90f23dacc5a0e7206c3eec5ce625370f13daea6fdad91f84363ae914be4cb2 Sep 30 15:06:05 crc kubenswrapper[4783]: I0930 15:06:05.385506 4783 generic.go:334] "Generic (PLEG): container finished" podID="07392d39-a347-4741-9c38-6af20540ee65" containerID="2114c091556beb069ae03d2ca25cc4ad4d62a214463ca7ef102b2e056beeeb22" exitCode=0 Sep 30 15:06:05 crc kubenswrapper[4783]: I0930 15:06:05.386391 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" event={"ID":"07392d39-a347-4741-9c38-6af20540ee65","Type":"ContainerDied","Data":"2114c091556beb069ae03d2ca25cc4ad4d62a214463ca7ef102b2e056beeeb22"} Sep 30 15:06:05 crc kubenswrapper[4783]: I0930 15:06:05.393493 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lzrpk" event={"ID":"839ffd50-f515-4777-a674-29cb6d90fb2e","Type":"ContainerStarted","Data":"ecbca7a37f0349ac6e3754e055ac7dc36a19f55dcd94c4e11836a1f391013a4c"} Sep 30 15:06:05 crc kubenswrapper[4783]: I0930 15:06:05.393545 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lzrpk" event={"ID":"839ffd50-f515-4777-a674-29cb6d90fb2e","Type":"ContainerStarted","Data":"cd90f23dacc5a0e7206c3eec5ce625370f13daea6fdad91f84363ae914be4cb2"} Sep 30 15:06:05 crc kubenswrapper[4783]: I0930 15:06:05.448653 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-lzrpk" podStartSLOduration=2.448629285 podStartE2EDuration="2.448629285s" podCreationTimestamp="2025-09-30 15:06:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:06:05.438879473 +0000 UTC m=+5465.370345780" watchObservedRunningTime="2025-09-30 15:06:05.448629285 +0000 UTC m=+5465.380095612" Sep 30 
15:06:06 crc kubenswrapper[4783]: I0930 15:06:06.403799 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" event={"ID":"07392d39-a347-4741-9c38-6af20540ee65","Type":"ContainerStarted","Data":"3d11c02df9324eef265af254be57064bf22610f082d7353d8b0c0419691fe230"} Sep 30 15:06:06 crc kubenswrapper[4783]: I0930 15:06:06.428514 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" podStartSLOduration=3.428484547 podStartE2EDuration="3.428484547s" podCreationTimestamp="2025-09-30 15:06:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:06:06.42264595 +0000 UTC m=+5466.354112307" watchObservedRunningTime="2025-09-30 15:06:06.428484547 +0000 UTC m=+5466.359950874" Sep 30 15:06:07 crc kubenswrapper[4783]: I0930 15:06:07.411169 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:08 crc kubenswrapper[4783]: I0930 15:06:08.424721 4783 generic.go:334] "Generic (PLEG): container finished" podID="839ffd50-f515-4777-a674-29cb6d90fb2e" containerID="ecbca7a37f0349ac6e3754e055ac7dc36a19f55dcd94c4e11836a1f391013a4c" exitCode=0 Sep 30 15:06:08 crc kubenswrapper[4783]: I0930 15:06:08.424817 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lzrpk" event={"ID":"839ffd50-f515-4777-a674-29cb6d90fb2e","Type":"ContainerDied","Data":"ecbca7a37f0349ac6e3754e055ac7dc36a19f55dcd94c4e11836a1f391013a4c"} Sep 30 15:06:09 crc kubenswrapper[4783]: I0930 15:06:09.268498 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-snhkr" Sep 30 15:06:09 crc kubenswrapper[4783]: I0930 15:06:09.331771 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-snhkr"] Sep 30 15:06:09 crc kubenswrapper[4783]: I0930 15:06:09.382167 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xrc9l"] Sep 30 15:06:09 crc kubenswrapper[4783]: I0930 15:06:09.382547 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xrc9l" podUID="9a782e0d-6419-465e-be5e-c33abcf4aedb" containerName="registry-server" containerID="cri-o://1c0db62c82d69b9e1d1a48fb4242d567db3b40a3e1b1dbfe45054ad6342d14b1" gracePeriod=2 Sep 30 15:06:09 crc kubenswrapper[4783]: I0930 15:06:09.912753 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:09 crc kubenswrapper[4783]: I0930 15:06:09.923180 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xrc9l" Sep 30 15:06:09 crc kubenswrapper[4783]: I0930 15:06:09.999769 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-config-data\") pod \"839ffd50-f515-4777-a674-29cb6d90fb2e\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " Sep 30 15:06:09 crc kubenswrapper[4783]: I0930 15:06:09.999817 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a782e0d-6419-465e-be5e-c33abcf4aedb-catalog-content\") pod \"9a782e0d-6419-465e-be5e-c33abcf4aedb\" (UID: \"9a782e0d-6419-465e-be5e-c33abcf4aedb\") " Sep 30 15:06:09 crc kubenswrapper[4783]: I0930 15:06:09.999871 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-scripts\") pod \"839ffd50-f515-4777-a674-29cb6d90fb2e\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " Sep 30 15:06:09 crc kubenswrapper[4783]: I0930 15:06:09.999889 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-combined-ca-bundle\") pod \"839ffd50-f515-4777-a674-29cb6d90fb2e\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " Sep 30 15:06:09 crc kubenswrapper[4783]: I0930 15:06:09.999944 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-credential-keys\") pod \"839ffd50-f515-4777-a674-29cb6d90fb2e\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.000036 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-fernet-keys\") pod \"839ffd50-f515-4777-a674-29cb6d90fb2e\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.000102 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2sllc\" (UniqueName: \"kubernetes.io/projected/9a782e0d-6419-465e-be5e-c33abcf4aedb-kube-api-access-2sllc\") pod \"9a782e0d-6419-465e-be5e-c33abcf4aedb\" (UID: \"9a782e0d-6419-465e-be5e-c33abcf4aedb\") " Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.000121 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5dxcc\" (UniqueName: \"kubernetes.io/projected/839ffd50-f515-4777-a674-29cb6d90fb2e-kube-api-access-5dxcc\") pod \"839ffd50-f515-4777-a674-29cb6d90fb2e\" (UID: \"839ffd50-f515-4777-a674-29cb6d90fb2e\") " Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.000146 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a782e0d-6419-465e-be5e-c33abcf4aedb-utilities\") pod \"9a782e0d-6419-465e-be5e-c33abcf4aedb\" (UID: \"9a782e0d-6419-465e-be5e-c33abcf4aedb\") " Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.000942 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a782e0d-6419-465e-be5e-c33abcf4aedb-utilities" (OuterVolumeSpecName: "utilities") pod "9a782e0d-6419-465e-be5e-c33abcf4aedb" (UID: 
"9a782e0d-6419-465e-be5e-c33abcf4aedb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.010720 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "839ffd50-f515-4777-a674-29cb6d90fb2e" (UID: "839ffd50-f515-4777-a674-29cb6d90fb2e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.010860 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-scripts" (OuterVolumeSpecName: "scripts") pod "839ffd50-f515-4777-a674-29cb6d90fb2e" (UID: "839ffd50-f515-4777-a674-29cb6d90fb2e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.023460 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/839ffd50-f515-4777-a674-29cb6d90fb2e-kube-api-access-5dxcc" (OuterVolumeSpecName: "kube-api-access-5dxcc") pod "839ffd50-f515-4777-a674-29cb6d90fb2e" (UID: "839ffd50-f515-4777-a674-29cb6d90fb2e"). InnerVolumeSpecName "kube-api-access-5dxcc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.057189 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "839ffd50-f515-4777-a674-29cb6d90fb2e" (UID: "839ffd50-f515-4777-a674-29cb6d90fb2e"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.060817 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "839ffd50-f515-4777-a674-29cb6d90fb2e" (UID: "839ffd50-f515-4777-a674-29cb6d90fb2e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.061258 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a782e0d-6419-465e-be5e-c33abcf4aedb-kube-api-access-2sllc" (OuterVolumeSpecName: "kube-api-access-2sllc") pod "9a782e0d-6419-465e-be5e-c33abcf4aedb" (UID: "9a782e0d-6419-465e-be5e-c33abcf4aedb"). InnerVolumeSpecName "kube-api-access-2sllc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.081357 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-config-data" (OuterVolumeSpecName: "config-data") pod "839ffd50-f515-4777-a674-29cb6d90fb2e" (UID: "839ffd50-f515-4777-a674-29cb6d90fb2e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.090203 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a782e0d-6419-465e-be5e-c33abcf4aedb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a782e0d-6419-465e-be5e-c33abcf4aedb" (UID: "9a782e0d-6419-465e-be5e-c33abcf4aedb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.104618 4783 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.104660 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2sllc\" (UniqueName: \"kubernetes.io/projected/9a782e0d-6419-465e-be5e-c33abcf4aedb-kube-api-access-2sllc\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.104671 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5dxcc\" (UniqueName: \"kubernetes.io/projected/839ffd50-f515-4777-a674-29cb6d90fb2e-kube-api-access-5dxcc\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.104680 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a782e0d-6419-465e-be5e-c33abcf4aedb-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.104692 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.104702 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a782e0d-6419-465e-be5e-c33abcf4aedb-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.104710 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.104718 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.104726 4783 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/839ffd50-f515-4777-a674-29cb6d90fb2e-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.446362 4783 generic.go:334] "Generic (PLEG): container finished" podID="9a782e0d-6419-465e-be5e-c33abcf4aedb" containerID="1c0db62c82d69b9e1d1a48fb4242d567db3b40a3e1b1dbfe45054ad6342d14b1" exitCode=0 Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.446770 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xrc9l" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.446642 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xrc9l" event={"ID":"9a782e0d-6419-465e-be5e-c33abcf4aedb","Type":"ContainerDied","Data":"1c0db62c82d69b9e1d1a48fb4242d567db3b40a3e1b1dbfe45054ad6342d14b1"} Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.446896 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xrc9l" event={"ID":"9a782e0d-6419-465e-be5e-c33abcf4aedb","Type":"ContainerDied","Data":"9d9607aed8bb99e0fd5802935b309d5bf1ea5e9fcc6560f92054d89538427ae9"} Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.446935 4783 scope.go:117] "RemoveContainer" containerID="1c0db62c82d69b9e1d1a48fb4242d567db3b40a3e1b1dbfe45054ad6342d14b1" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.448570 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lzrpk" event={"ID":"839ffd50-f515-4777-a674-29cb6d90fb2e","Type":"ContainerDied","Data":"cd90f23dacc5a0e7206c3eec5ce625370f13daea6fdad91f84363ae914be4cb2"} Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.448613 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd90f23dacc5a0e7206c3eec5ce625370f13daea6fdad91f84363ae914be4cb2" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.448676 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lzrpk" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.476351 4783 scope.go:117] "RemoveContainer" containerID="e2f2d77d658448bb488d63acf7f083d48a3e51bd8fa03e78bbfbab8728002a4c" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.498257 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xrc9l"] Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.518412 4783 scope.go:117] "RemoveContainer" containerID="7529024e460155f574d9464bc90465e8b6bfd95db188c73bf8227305ee3689eb" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.527957 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xrc9l"] Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.546921 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-lzrpk"] Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.553191 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-lzrpk"] Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.591755 4783 scope.go:117] "RemoveContainer" containerID="1c0db62c82d69b9e1d1a48fb4242d567db3b40a3e1b1dbfe45054ad6342d14b1" Sep 30 15:06:10 crc kubenswrapper[4783]: E0930 15:06:10.592568 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c0db62c82d69b9e1d1a48fb4242d567db3b40a3e1b1dbfe45054ad6342d14b1\": container with ID starting with 1c0db62c82d69b9e1d1a48fb4242d567db3b40a3e1b1dbfe45054ad6342d14b1 not found: ID does not exist" containerID="1c0db62c82d69b9e1d1a48fb4242d567db3b40a3e1b1dbfe45054ad6342d14b1" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.592643 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c0db62c82d69b9e1d1a48fb4242d567db3b40a3e1b1dbfe45054ad6342d14b1"} err="failed to get container status 
\"1c0db62c82d69b9e1d1a48fb4242d567db3b40a3e1b1dbfe45054ad6342d14b1\": rpc error: code = NotFound desc = could not find container \"1c0db62c82d69b9e1d1a48fb4242d567db3b40a3e1b1dbfe45054ad6342d14b1\": container with ID starting with 1c0db62c82d69b9e1d1a48fb4242d567db3b40a3e1b1dbfe45054ad6342d14b1 not found: ID does not exist" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.592688 4783 scope.go:117] "RemoveContainer" containerID="e2f2d77d658448bb488d63acf7f083d48a3e51bd8fa03e78bbfbab8728002a4c" Sep 30 15:06:10 crc kubenswrapper[4783]: E0930 15:06:10.593094 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2f2d77d658448bb488d63acf7f083d48a3e51bd8fa03e78bbfbab8728002a4c\": container with ID starting with e2f2d77d658448bb488d63acf7f083d48a3e51bd8fa03e78bbfbab8728002a4c not found: ID does not exist" containerID="e2f2d77d658448bb488d63acf7f083d48a3e51bd8fa03e78bbfbab8728002a4c" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.593143 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2f2d77d658448bb488d63acf7f083d48a3e51bd8fa03e78bbfbab8728002a4c"} err="failed to get container status \"e2f2d77d658448bb488d63acf7f083d48a3e51bd8fa03e78bbfbab8728002a4c\": rpc error: code = NotFound desc = could not find container \"e2f2d77d658448bb488d63acf7f083d48a3e51bd8fa03e78bbfbab8728002a4c\": container with ID starting with e2f2d77d658448bb488d63acf7f083d48a3e51bd8fa03e78bbfbab8728002a4c not found: ID does not exist" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.593166 4783 scope.go:117] "RemoveContainer" containerID="7529024e460155f574d9464bc90465e8b6bfd95db188c73bf8227305ee3689eb" Sep 30 15:06:10 crc kubenswrapper[4783]: E0930 15:06:10.603546 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7529024e460155f574d9464bc90465e8b6bfd95db188c73bf8227305ee3689eb\": container with ID starting with 7529024e460155f574d9464bc90465e8b6bfd95db188c73bf8227305ee3689eb not found: ID does not exist" containerID="7529024e460155f574d9464bc90465e8b6bfd95db188c73bf8227305ee3689eb" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.603639 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7529024e460155f574d9464bc90465e8b6bfd95db188c73bf8227305ee3689eb"} err="failed to get container status \"7529024e460155f574d9464bc90465e8b6bfd95db188c73bf8227305ee3689eb\": rpc error: code = NotFound desc = could not find container \"7529024e460155f574d9464bc90465e8b6bfd95db188c73bf8227305ee3689eb\": container with ID starting with 7529024e460155f574d9464bc90465e8b6bfd95db188c73bf8227305ee3689eb not found: ID does not exist" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.658343 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-6pgfq"] Sep 30 15:06:10 crc kubenswrapper[4783]: E0930 15:06:10.659375 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a782e0d-6419-465e-be5e-c33abcf4aedb" containerName="registry-server" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.659392 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a782e0d-6419-465e-be5e-c33abcf4aedb" containerName="registry-server" Sep 30 15:06:10 crc kubenswrapper[4783]: E0930 15:06:10.659412 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="839ffd50-f515-4777-a674-29cb6d90fb2e" 
containerName="keystone-bootstrap" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.659422 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="839ffd50-f515-4777-a674-29cb6d90fb2e" containerName="keystone-bootstrap" Sep 30 15:06:10 crc kubenswrapper[4783]: E0930 15:06:10.659446 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a782e0d-6419-465e-be5e-c33abcf4aedb" containerName="extract-utilities" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.659454 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a782e0d-6419-465e-be5e-c33abcf4aedb" containerName="extract-utilities" Sep 30 15:06:10 crc kubenswrapper[4783]: E0930 15:06:10.659520 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a782e0d-6419-465e-be5e-c33abcf4aedb" containerName="extract-content" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.659528 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a782e0d-6419-465e-be5e-c33abcf4aedb" containerName="extract-content" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.659941 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="839ffd50-f515-4777-a674-29cb6d90fb2e" containerName="keystone-bootstrap" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.659977 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a782e0d-6419-465e-be5e-c33abcf4aedb" containerName="registry-server" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.661325 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.665193 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.665688 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.665704 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.667755 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ndvm5" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.673425 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-6pgfq"] Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.721377 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-scripts\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.721861 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vq4mk\" (UniqueName: \"kubernetes.io/projected/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-kube-api-access-vq4mk\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.722166 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-combined-ca-bundle\") pod \"keystone-bootstrap-6pgfq\" (UID: 
\"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.722515 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-fernet-keys\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.722687 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-credential-keys\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.723323 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-config-data\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.825349 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-scripts\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.825455 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vq4mk\" (UniqueName: \"kubernetes.io/projected/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-kube-api-access-vq4mk\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.825491 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-combined-ca-bundle\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.825525 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-fernet-keys\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.825543 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-credential-keys\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.825560 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-config-data\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc 
kubenswrapper[4783]: I0930 15:06:10.830166 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-combined-ca-bundle\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.830173 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-config-data\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.830858 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-fernet-keys\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.831043 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-credential-keys\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.836949 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-scripts\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.853122 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vq4mk\" (UniqueName: \"kubernetes.io/projected/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-kube-api-access-vq4mk\") pod \"keystone-bootstrap-6pgfq\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.855114 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="839ffd50-f515-4777-a674-29cb6d90fb2e" path="/var/lib/kubelet/pods/839ffd50-f515-4777-a674-29cb6d90fb2e/volumes" Sep 30 15:06:10 crc kubenswrapper[4783]: I0930 15:06:10.855916 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a782e0d-6419-465e-be5e-c33abcf4aedb" path="/var/lib/kubelet/pods/9a782e0d-6419-465e-be5e-c33abcf4aedb/volumes" Sep 30 15:06:11 crc kubenswrapper[4783]: I0930 15:06:11.004854 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:11 crc kubenswrapper[4783]: I0930 15:06:11.513409 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-6pgfq"] Sep 30 15:06:12 crc kubenswrapper[4783]: I0930 15:06:12.480942 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6pgfq" event={"ID":"7d90e695-a604-48e0-bdd9-da9ebd2b5d47","Type":"ContainerStarted","Data":"0086f5804864f6e9fb873fc268f6e6e3cbacaab94ce3fdb870bfd79cb941bbaa"} Sep 30 15:06:12 crc kubenswrapper[4783]: I0930 15:06:12.481534 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6pgfq" event={"ID":"7d90e695-a604-48e0-bdd9-da9ebd2b5d47","Type":"ContainerStarted","Data":"e4f862c144e9afc97583e0ab27eb13cb588147023f8139882577d55317791f3d"} Sep 30 15:06:13 crc kubenswrapper[4783]: I0930 15:06:13.910456 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-69d4477d9-4wxtw" Sep 30 15:06:13 crc kubenswrapper[4783]: I0930 15:06:13.940890 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-6pgfq" podStartSLOduration=3.940863027 podStartE2EDuration="3.940863027s" podCreationTimestamp="2025-09-30 15:06:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:06:12.507029 +0000 UTC m=+5472.438495307" watchObservedRunningTime="2025-09-30 15:06:13.940863027 +0000 UTC m=+5473.872329344" Sep 30 15:06:13 crc kubenswrapper[4783]: I0930 15:06:13.965343 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f959578df-vbwjf"] Sep 30 15:06:13 crc kubenswrapper[4783]: I0930 15:06:13.965834 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f959578df-vbwjf" podUID="06c125eb-bc02-42b4-a1d5-523be036ab98" containerName="dnsmasq-dns" containerID="cri-o://9ff523181d1756d27d2a04f2b5b9c99774e22e8d67ae1a59e6943a9a571a93a0" gracePeriod=10 Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.447174 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.502867 4783 generic.go:334] "Generic (PLEG): container finished" podID="06c125eb-bc02-42b4-a1d5-523be036ab98" containerID="9ff523181d1756d27d2a04f2b5b9c99774e22e8d67ae1a59e6943a9a571a93a0" exitCode=0 Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.503399 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f959578df-vbwjf" event={"ID":"06c125eb-bc02-42b4-a1d5-523be036ab98","Type":"ContainerDied","Data":"9ff523181d1756d27d2a04f2b5b9c99774e22e8d67ae1a59e6943a9a571a93a0"} Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.503445 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f959578df-vbwjf" event={"ID":"06c125eb-bc02-42b4-a1d5-523be036ab98","Type":"ContainerDied","Data":"f28d840f1e60f72f3aa434044c4f188abd62b402550c29c657939e17e50556cf"} Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.503473 4783 scope.go:117] "RemoveContainer" containerID="9ff523181d1756d27d2a04f2b5b9c99774e22e8d67ae1a59e6943a9a571a93a0" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.503650 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f959578df-vbwjf" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.506875 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-dns-svc\") pod \"06c125eb-bc02-42b4-a1d5-523be036ab98\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.507081 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-config\") pod \"06c125eb-bc02-42b4-a1d5-523be036ab98\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.507108 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-ovsdbserver-sb\") pod \"06c125eb-bc02-42b4-a1d5-523be036ab98\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.507142 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsdfh\" (UniqueName: \"kubernetes.io/projected/06c125eb-bc02-42b4-a1d5-523be036ab98-kube-api-access-vsdfh\") pod \"06c125eb-bc02-42b4-a1d5-523be036ab98\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.509597 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-ovsdbserver-nb\") pod \"06c125eb-bc02-42b4-a1d5-523be036ab98\" (UID: \"06c125eb-bc02-42b4-a1d5-523be036ab98\") " Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.515465 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06c125eb-bc02-42b4-a1d5-523be036ab98-kube-api-access-vsdfh" (OuterVolumeSpecName: "kube-api-access-vsdfh") pod "06c125eb-bc02-42b4-a1d5-523be036ab98" (UID: "06c125eb-bc02-42b4-a1d5-523be036ab98"). InnerVolumeSpecName "kube-api-access-vsdfh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.554668 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-config" (OuterVolumeSpecName: "config") pod "06c125eb-bc02-42b4-a1d5-523be036ab98" (UID: "06c125eb-bc02-42b4-a1d5-523be036ab98"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.560668 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "06c125eb-bc02-42b4-a1d5-523be036ab98" (UID: "06c125eb-bc02-42b4-a1d5-523be036ab98"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.562809 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "06c125eb-bc02-42b4-a1d5-523be036ab98" (UID: "06c125eb-bc02-42b4-a1d5-523be036ab98"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.563776 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "06c125eb-bc02-42b4-a1d5-523be036ab98" (UID: "06c125eb-bc02-42b4-a1d5-523be036ab98"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.613587 4783 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.614079 4783 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-config\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.614752 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.614828 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsdfh\" (UniqueName: \"kubernetes.io/projected/06c125eb-bc02-42b4-a1d5-523be036ab98-kube-api-access-vsdfh\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.614898 4783 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06c125eb-bc02-42b4-a1d5-523be036ab98-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.614830 4783 scope.go:117] "RemoveContainer" containerID="f173613b680e37ca72b7a12856353df35248c01b59b1fbe31f33ce9b726383d9" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.638245 4783 scope.go:117] "RemoveContainer" containerID="9ff523181d1756d27d2a04f2b5b9c99774e22e8d67ae1a59e6943a9a571a93a0" Sep 30 15:06:14 crc kubenswrapper[4783]: E0930 15:06:14.639082 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ff523181d1756d27d2a04f2b5b9c99774e22e8d67ae1a59e6943a9a571a93a0\": container with ID starting with 9ff523181d1756d27d2a04f2b5b9c99774e22e8d67ae1a59e6943a9a571a93a0 not found: ID does not exist" containerID="9ff523181d1756d27d2a04f2b5b9c99774e22e8d67ae1a59e6943a9a571a93a0" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.639141 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ff523181d1756d27d2a04f2b5b9c99774e22e8d67ae1a59e6943a9a571a93a0"} err="failed to get container status \"9ff523181d1756d27d2a04f2b5b9c99774e22e8d67ae1a59e6943a9a571a93a0\": rpc error: code = NotFound desc = could not find container \"9ff523181d1756d27d2a04f2b5b9c99774e22e8d67ae1a59e6943a9a571a93a0\": container with ID starting with 9ff523181d1756d27d2a04f2b5b9c99774e22e8d67ae1a59e6943a9a571a93a0 not found: ID does not exist" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.639181 4783 scope.go:117] "RemoveContainer" containerID="f173613b680e37ca72b7a12856353df35248c01b59b1fbe31f33ce9b726383d9" Sep 30 15:06:14 crc kubenswrapper[4783]: E0930 15:06:14.639787 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"f173613b680e37ca72b7a12856353df35248c01b59b1fbe31f33ce9b726383d9\": container with ID starting with f173613b680e37ca72b7a12856353df35248c01b59b1fbe31f33ce9b726383d9 not found: ID does not exist" containerID="f173613b680e37ca72b7a12856353df35248c01b59b1fbe31f33ce9b726383d9" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.639885 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f173613b680e37ca72b7a12856353df35248c01b59b1fbe31f33ce9b726383d9"} err="failed to get container status \"f173613b680e37ca72b7a12856353df35248c01b59b1fbe31f33ce9b726383d9\": rpc error: code = NotFound desc = could not find container \"f173613b680e37ca72b7a12856353df35248c01b59b1fbe31f33ce9b726383d9\": container with ID starting with f173613b680e37ca72b7a12856353df35248c01b59b1fbe31f33ce9b726383d9 not found: ID does not exist" Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.858163 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f959578df-vbwjf"] Sep 30 15:06:14 crc kubenswrapper[4783]: I0930 15:06:14.858253 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f959578df-vbwjf"] Sep 30 15:06:15 crc kubenswrapper[4783]: I0930 15:06:15.514839 4783 generic.go:334] "Generic (PLEG): container finished" podID="7d90e695-a604-48e0-bdd9-da9ebd2b5d47" containerID="0086f5804864f6e9fb873fc268f6e6e3cbacaab94ce3fdb870bfd79cb941bbaa" exitCode=0 Sep 30 15:06:15 crc kubenswrapper[4783]: I0930 15:06:15.514904 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6pgfq" event={"ID":"7d90e695-a604-48e0-bdd9-da9ebd2b5d47","Type":"ContainerDied","Data":"0086f5804864f6e9fb873fc268f6e6e3cbacaab94ce3fdb870bfd79cb941bbaa"} Sep 30 15:06:16 crc kubenswrapper[4783]: I0930 15:06:16.854310 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06c125eb-bc02-42b4-a1d5-523be036ab98" path="/var/lib/kubelet/pods/06c125eb-bc02-42b4-a1d5-523be036ab98/volumes" Sep 30 15:06:16 crc kubenswrapper[4783]: I0930 15:06:16.895303 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:16 crc kubenswrapper[4783]: I0930 15:06:16.954960 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-scripts\") pod \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " Sep 30 15:06:16 crc kubenswrapper[4783]: I0930 15:06:16.955040 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vq4mk\" (UniqueName: \"kubernetes.io/projected/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-kube-api-access-vq4mk\") pod \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " Sep 30 15:06:16 crc kubenswrapper[4783]: I0930 15:06:16.955079 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-fernet-keys\") pod \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " Sep 30 15:06:16 crc kubenswrapper[4783]: I0930 15:06:16.955237 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-config-data\") pod \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " Sep 30 15:06:16 crc kubenswrapper[4783]: I0930 15:06:16.955255 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-credential-keys\") pod \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " Sep 30 15:06:16 crc kubenswrapper[4783]: I0930 15:06:16.955292 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-combined-ca-bundle\") pod \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\" (UID: \"7d90e695-a604-48e0-bdd9-da9ebd2b5d47\") " Sep 30 15:06:16 crc kubenswrapper[4783]: I0930 15:06:16.963015 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "7d90e695-a604-48e0-bdd9-da9ebd2b5d47" (UID: "7d90e695-a604-48e0-bdd9-da9ebd2b5d47"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 15:06:16 crc kubenswrapper[4783]: I0930 15:06:16.964943 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-kube-api-access-vq4mk" (OuterVolumeSpecName: "kube-api-access-vq4mk") pod "7d90e695-a604-48e0-bdd9-da9ebd2b5d47" (UID: "7d90e695-a604-48e0-bdd9-da9ebd2b5d47"). InnerVolumeSpecName "kube-api-access-vq4mk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:06:16 crc kubenswrapper[4783]: I0930 15:06:16.965139 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-scripts" (OuterVolumeSpecName: "scripts") pod "7d90e695-a604-48e0-bdd9-da9ebd2b5d47" (UID: "7d90e695-a604-48e0-bdd9-da9ebd2b5d47"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 15:06:16 crc kubenswrapper[4783]: I0930 15:06:16.965364 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7d90e695-a604-48e0-bdd9-da9ebd2b5d47" (UID: "7d90e695-a604-48e0-bdd9-da9ebd2b5d47"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 15:06:16 crc kubenswrapper[4783]: I0930 15:06:16.992142 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-config-data" (OuterVolumeSpecName: "config-data") pod "7d90e695-a604-48e0-bdd9-da9ebd2b5d47" (UID: "7d90e695-a604-48e0-bdd9-da9ebd2b5d47"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.002926 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d90e695-a604-48e0-bdd9-da9ebd2b5d47" (UID: "7d90e695-a604-48e0-bdd9-da9ebd2b5d47"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.056916 4783 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.056959 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vq4mk\" (UniqueName: \"kubernetes.io/projected/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-kube-api-access-vq4mk\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.056974 4783 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.056986 4783 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.057002 4783 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.057014 4783 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d90e695-a604-48e0-bdd9-da9ebd2b5d47-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.542256 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6pgfq" event={"ID":"7d90e695-a604-48e0-bdd9-da9ebd2b5d47","Type":"ContainerDied","Data":"e4f862c144e9afc97583e0ab27eb13cb588147023f8139882577d55317791f3d"} Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.542861 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4f862c144e9afc97583e0ab27eb13cb588147023f8139882577d55317791f3d" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.542335 4783 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-6pgfq" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.656457 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6d74b95c4d-2tv5q"] Sep 30 15:06:17 crc kubenswrapper[4783]: E0930 15:06:17.657326 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06c125eb-bc02-42b4-a1d5-523be036ab98" containerName="dnsmasq-dns" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.657349 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="06c125eb-bc02-42b4-a1d5-523be036ab98" containerName="dnsmasq-dns" Sep 30 15:06:17 crc kubenswrapper[4783]: E0930 15:06:17.657393 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d90e695-a604-48e0-bdd9-da9ebd2b5d47" containerName="keystone-bootstrap" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.657399 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d90e695-a604-48e0-bdd9-da9ebd2b5d47" containerName="keystone-bootstrap" Sep 30 15:06:17 crc kubenswrapper[4783]: E0930 15:06:17.657412 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06c125eb-bc02-42b4-a1d5-523be036ab98" containerName="init" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.657422 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="06c125eb-bc02-42b4-a1d5-523be036ab98" containerName="init" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.657609 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="06c125eb-bc02-42b4-a1d5-523be036ab98" containerName="dnsmasq-dns" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.657630 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d90e695-a604-48e0-bdd9-da9ebd2b5d47" containerName="keystone-bootstrap" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.658459 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.660519 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.665302 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.665494 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ndvm5" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.665623 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.665688 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.665694 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.686480 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6d74b95c4d-2tv5q"] Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.769513 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-config-data\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.769590 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-internal-tls-certs\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.769650 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ss5rx\" (UniqueName: \"kubernetes.io/projected/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-kube-api-access-ss5rx\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.769813 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-credential-keys\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.770085 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-fernet-keys\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.770244 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-public-tls-certs\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: 
\"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.770432 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-scripts\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.770602 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-combined-ca-bundle\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.872670 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-fernet-keys\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.872760 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-public-tls-certs\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.872884 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-scripts\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.872950 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-combined-ca-bundle\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.873034 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-config-data\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.873100 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-internal-tls-certs\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.873164 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ss5rx\" (UniqueName: \"kubernetes.io/projected/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-kube-api-access-ss5rx\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 
30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.873255 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-credential-keys\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.877528 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-fernet-keys\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.877935 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-public-tls-certs\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.878415 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-combined-ca-bundle\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.880633 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-scripts\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.880912 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-config-data\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.881900 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-credential-keys\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.883118 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-internal-tls-certs\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.889990 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ss5rx\" (UniqueName: \"kubernetes.io/projected/4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf-kube-api-access-ss5rx\") pod \"keystone-6d74b95c4d-2tv5q\" (UID: \"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf\") " pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:17 crc kubenswrapper[4783]: I0930 15:06:17.985004 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:18 crc kubenswrapper[4783]: I0930 15:06:18.424478 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6d74b95c4d-2tv5q"] Sep 30 15:06:18 crc kubenswrapper[4783]: I0930 15:06:18.552799 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6d74b95c4d-2tv5q" event={"ID":"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf","Type":"ContainerStarted","Data":"6cb90a44dae73be6d2fc42674d803dc3111a3fe5cf53527a5556d68fcd205e90"} Sep 30 15:06:19 crc kubenswrapper[4783]: I0930 15:06:19.345182 4783 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f959578df-vbwjf" podUID="06c125eb-bc02-42b4-a1d5-523be036ab98" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.16:5353: i/o timeout" Sep 30 15:06:19 crc kubenswrapper[4783]: I0930 15:06:19.564202 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6d74b95c4d-2tv5q" event={"ID":"4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf","Type":"ContainerStarted","Data":"00acfae64c833331d4493406e84e919b0bf3b048dda2b9715913667019a2157a"} Sep 30 15:06:19 crc kubenswrapper[4783]: I0930 15:06:19.583775 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6d74b95c4d-2tv5q" podStartSLOduration=2.583760163 podStartE2EDuration="2.583760163s" podCreationTimestamp="2025-09-30 15:06:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:06:19.58057009 +0000 UTC m=+5479.512036457" watchObservedRunningTime="2025-09-30 15:06:19.583760163 +0000 UTC m=+5479.515226470" Sep 30 15:06:20 crc kubenswrapper[4783]: I0930 15:06:20.573377 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:49 crc kubenswrapper[4783]: I0930 15:06:49.531845 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6d74b95c4d-2tv5q" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.103017 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.104569 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.107263 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.107359 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-g5vbl" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.108497 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.123957 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.220596 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d95cafd0-abef-4749-b89f-388031d454ad-openstack-config-secret\") pod \"openstackclient\" (UID: \"d95cafd0-abef-4749-b89f-388031d454ad\") " pod="openstack/openstackclient" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.220693 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d95cafd0-abef-4749-b89f-388031d454ad-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d95cafd0-abef-4749-b89f-388031d454ad\") " pod="openstack/openstackclient" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.220747 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d95cafd0-abef-4749-b89f-388031d454ad-openstack-config\") pod \"openstackclient\" (UID: \"d95cafd0-abef-4749-b89f-388031d454ad\") " pod="openstack/openstackclient" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.220823 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbtwl\" (UniqueName: \"kubernetes.io/projected/d95cafd0-abef-4749-b89f-388031d454ad-kube-api-access-dbtwl\") pod \"openstackclient\" (UID: \"d95cafd0-abef-4749-b89f-388031d454ad\") " pod="openstack/openstackclient" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.324314 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d95cafd0-abef-4749-b89f-388031d454ad-openstack-config-secret\") pod \"openstackclient\" (UID: \"d95cafd0-abef-4749-b89f-388031d454ad\") " pod="openstack/openstackclient" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.324852 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d95cafd0-abef-4749-b89f-388031d454ad-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d95cafd0-abef-4749-b89f-388031d454ad\") " pod="openstack/openstackclient" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.325093 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d95cafd0-abef-4749-b89f-388031d454ad-openstack-config\") pod \"openstackclient\" (UID: \"d95cafd0-abef-4749-b89f-388031d454ad\") " pod="openstack/openstackclient" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.325218 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-dbtwl\" (UniqueName: \"kubernetes.io/projected/d95cafd0-abef-4749-b89f-388031d454ad-kube-api-access-dbtwl\") pod \"openstackclient\" (UID: \"d95cafd0-abef-4749-b89f-388031d454ad\") " pod="openstack/openstackclient" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.326258 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d95cafd0-abef-4749-b89f-388031d454ad-openstack-config\") pod \"openstackclient\" (UID: \"d95cafd0-abef-4749-b89f-388031d454ad\") " pod="openstack/openstackclient" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.333215 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d95cafd0-abef-4749-b89f-388031d454ad-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d95cafd0-abef-4749-b89f-388031d454ad\") " pod="openstack/openstackclient" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.338119 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d95cafd0-abef-4749-b89f-388031d454ad-openstack-config-secret\") pod \"openstackclient\" (UID: \"d95cafd0-abef-4749-b89f-388031d454ad\") " pod="openstack/openstackclient" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.344168 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbtwl\" (UniqueName: \"kubernetes.io/projected/d95cafd0-abef-4749-b89f-388031d454ad-kube-api-access-dbtwl\") pod \"openstackclient\" (UID: \"d95cafd0-abef-4749-b89f-388031d454ad\") " pod="openstack/openstackclient" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.431962 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 15:06:53 crc kubenswrapper[4783]: I0930 15:06:53.900685 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 15:06:54 crc kubenswrapper[4783]: I0930 15:06:54.894966 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"d95cafd0-abef-4749-b89f-388031d454ad","Type":"ContainerStarted","Data":"2be4fa5eabde033b3f641e98bf377fa732c5796dc38c43bada4dc8c6908875d9"} Sep 30 15:06:54 crc kubenswrapper[4783]: I0930 15:06:54.895045 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"d95cafd0-abef-4749-b89f-388031d454ad","Type":"ContainerStarted","Data":"dfbdc9aa3b7ab3c58529a817a04fb9981f3730d88e5d43aaafd4f34d2f0d6243"} Sep 30 15:06:54 crc kubenswrapper[4783]: I0930 15:06:54.918283 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.918250396 podStartE2EDuration="1.918250396s" podCreationTimestamp="2025-09-30 15:06:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 15:06:54.911106676 +0000 UTC m=+5514.842572983" watchObservedRunningTime="2025-09-30 15:06:54.918250396 +0000 UTC m=+5514.849716733" Sep 30 15:07:19 crc kubenswrapper[4783]: I0930 15:07:19.174188 4783 scope.go:117] "RemoveContainer" containerID="b352dd1708ffe963c936f96c1dc20d8db78cdea7a42ff7a1f430a571af3a2d66" Sep 30 15:07:19 crc kubenswrapper[4783]: I0930 15:07:19.199506 4783 scope.go:117] "RemoveContainer" containerID="a142eefc32e30ef4b170b0f51d837aab086b45b10515b5318c56dbb875d44cc4" Sep 30 15:07:19 crc kubenswrapper[4783]: I0930 15:07:19.263751 4783 scope.go:117] "RemoveContainer" containerID="9e1a8c7269f4c394d200b1aec28eaf63800e57021d525e2e45d8dc4f1017b613" Sep 30 15:07:19 crc kubenswrapper[4783]: I0930 15:07:19.301722 4783 scope.go:117] "RemoveContainer" containerID="8aaa93107d3437ed7447efad8197bb9e8a99539dc42ac48ee7aa93c781f9c4e4" Sep 30 15:07:19 crc kubenswrapper[4783]: I0930 15:07:19.337792 4783 scope.go:117] "RemoveContainer" containerID="07c04b2f4764660ea53a809cd72f8b9543b24c03bcdfec5241fc510a5db69a46" Sep 30 15:07:19 crc kubenswrapper[4783]: I0930 15:07:19.381436 4783 scope.go:117] "RemoveContainer" containerID="c41b7ae90cd2eeb11e53e5cfbcc315050dc20d7a6b532932264c216db218f1bf" Sep 30 15:07:19 crc kubenswrapper[4783]: I0930 15:07:19.417936 4783 scope.go:117] "RemoveContainer" containerID="fe6e7736fc2b95310bd323ad89eb8c190828d40000212a84e0be4d11f12a4af4" Sep 30 15:08:07 crc kubenswrapper[4783]: I0930 15:08:07.673630 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 15:08:07 crc kubenswrapper[4783]: I0930 15:08:07.674541 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 15:08:37 crc kubenswrapper[4783]: I0930 15:08:37.674213 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 15:08:37 crc kubenswrapper[4783]: I0930 15:08:37.674918 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 15:09:05 crc kubenswrapper[4783]: I0930 15:09:05.045358 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-7dfk5/must-gather-w7qnh"] Sep 30 15:09:05 crc kubenswrapper[4783]: I0930 15:09:05.047166 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7dfk5/must-gather-w7qnh" Sep 30 15:09:05 crc kubenswrapper[4783]: I0930 15:09:05.052429 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-7dfk5"/"openshift-service-ca.crt" Sep 30 15:09:05 crc kubenswrapper[4783]: I0930 15:09:05.052617 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-7dfk5"/"default-dockercfg-w69g9" Sep 30 15:09:05 crc kubenswrapper[4783]: I0930 15:09:05.053079 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-7dfk5"/"kube-root-ca.crt" Sep 30 15:09:05 crc kubenswrapper[4783]: I0930 15:09:05.062608 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-7dfk5/must-gather-w7qnh"] Sep 30 15:09:05 crc kubenswrapper[4783]: I0930 15:09:05.132248 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvxj7\" (UniqueName: \"kubernetes.io/projected/028a96da-a13d-45eb-beb2-fcf894166faf-kube-api-access-mvxj7\") pod \"must-gather-w7qnh\" (UID: \"028a96da-a13d-45eb-beb2-fcf894166faf\") " pod="openshift-must-gather-7dfk5/must-gather-w7qnh" Sep 30 15:09:05 crc kubenswrapper[4783]: I0930 15:09:05.132323 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/028a96da-a13d-45eb-beb2-fcf894166faf-must-gather-output\") pod \"must-gather-w7qnh\" (UID: \"028a96da-a13d-45eb-beb2-fcf894166faf\") " pod="openshift-must-gather-7dfk5/must-gather-w7qnh" Sep 30 15:09:05 crc kubenswrapper[4783]: I0930 15:09:05.233995 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvxj7\" (UniqueName: \"kubernetes.io/projected/028a96da-a13d-45eb-beb2-fcf894166faf-kube-api-access-mvxj7\") pod \"must-gather-w7qnh\" (UID: \"028a96da-a13d-45eb-beb2-fcf894166faf\") " pod="openshift-must-gather-7dfk5/must-gather-w7qnh" Sep 30 15:09:05 crc kubenswrapper[4783]: I0930 15:09:05.234068 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/028a96da-a13d-45eb-beb2-fcf894166faf-must-gather-output\") pod \"must-gather-w7qnh\" (UID: \"028a96da-a13d-45eb-beb2-fcf894166faf\") " pod="openshift-must-gather-7dfk5/must-gather-w7qnh" Sep 30 15:09:05 crc kubenswrapper[4783]: I0930 15:09:05.234496 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/028a96da-a13d-45eb-beb2-fcf894166faf-must-gather-output\") pod 
\"must-gather-w7qnh\" (UID: \"028a96da-a13d-45eb-beb2-fcf894166faf\") " pod="openshift-must-gather-7dfk5/must-gather-w7qnh" Sep 30 15:09:05 crc kubenswrapper[4783]: I0930 15:09:05.257212 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvxj7\" (UniqueName: \"kubernetes.io/projected/028a96da-a13d-45eb-beb2-fcf894166faf-kube-api-access-mvxj7\") pod \"must-gather-w7qnh\" (UID: \"028a96da-a13d-45eb-beb2-fcf894166faf\") " pod="openshift-must-gather-7dfk5/must-gather-w7qnh" Sep 30 15:09:05 crc kubenswrapper[4783]: I0930 15:09:05.370733 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7dfk5/must-gather-w7qnh" Sep 30 15:09:05 crc kubenswrapper[4783]: I0930 15:09:05.840213 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-7dfk5/must-gather-w7qnh"] Sep 30 15:09:06 crc kubenswrapper[4783]: I0930 15:09:06.102485 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7dfk5/must-gather-w7qnh" event={"ID":"028a96da-a13d-45eb-beb2-fcf894166faf","Type":"ContainerStarted","Data":"96d8bea2c9f7a028818423016d2bf65d91a912c30278fff69bd85aa0929da289"} Sep 30 15:09:07 crc kubenswrapper[4783]: I0930 15:09:07.674001 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 15:09:07 crc kubenswrapper[4783]: I0930 15:09:07.674389 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 15:09:07 crc kubenswrapper[4783]: I0930 15:09:07.674448 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 15:09:07 crc kubenswrapper[4783]: I0930 15:09:07.675162 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"02fca664b73840f45075e91eaba0fd3e357ad5132d18118ff03f8a09e061e7d8"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 15:09:07 crc kubenswrapper[4783]: I0930 15:09:07.675210 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://02fca664b73840f45075e91eaba0fd3e357ad5132d18118ff03f8a09e061e7d8" gracePeriod=600 Sep 30 15:09:08 crc kubenswrapper[4783]: I0930 15:09:08.126996 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="02fca664b73840f45075e91eaba0fd3e357ad5132d18118ff03f8a09e061e7d8" exitCode=0 Sep 30 15:09:08 crc kubenswrapper[4783]: I0930 15:09:08.127312 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" 
event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"02fca664b73840f45075e91eaba0fd3e357ad5132d18118ff03f8a09e061e7d8"} Sep 30 15:09:08 crc kubenswrapper[4783]: I0930 15:09:08.127345 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerStarted","Data":"04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2"} Sep 30 15:09:08 crc kubenswrapper[4783]: I0930 15:09:08.127363 4783 scope.go:117] "RemoveContainer" containerID="69bf5f15336c54c7c7a69887820e604373b62d2497989a5f628cc61697be8d06" Sep 30 15:09:10 crc kubenswrapper[4783]: I0930 15:09:10.143989 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7dfk5/must-gather-w7qnh" event={"ID":"028a96da-a13d-45eb-beb2-fcf894166faf","Type":"ContainerStarted","Data":"6bca4eddf7e2f303955cdcff83f61aa5fb6f8ea75de89d374217de54d3d280fc"} Sep 30 15:09:10 crc kubenswrapper[4783]: I0930 15:09:10.144531 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7dfk5/must-gather-w7qnh" event={"ID":"028a96da-a13d-45eb-beb2-fcf894166faf","Type":"ContainerStarted","Data":"d1704fa6fe1be1285afba426699dbfe394116eae4ec8238be671f2ffc7502461"} Sep 30 15:09:12 crc kubenswrapper[4783]: I0930 15:09:12.431991 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-7dfk5/must-gather-w7qnh" podStartSLOduration=3.757789205 podStartE2EDuration="7.431973045s" podCreationTimestamp="2025-09-30 15:09:05 +0000 UTC" firstStartedPulling="2025-09-30 15:09:05.850127045 +0000 UTC m=+5645.781593352" lastFinishedPulling="2025-09-30 15:09:09.524310885 +0000 UTC m=+5649.455777192" observedRunningTime="2025-09-30 15:09:10.169604196 +0000 UTC m=+5650.101070503" watchObservedRunningTime="2025-09-30 15:09:12.431973045 +0000 UTC m=+5652.363439352" Sep 30 15:09:12 crc kubenswrapper[4783]: I0930 15:09:12.435398 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-7dfk5/crc-debug-pcqgn"] Sep 30 15:09:12 crc kubenswrapper[4783]: I0930 15:09:12.436406 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-7dfk5/crc-debug-pcqgn" Sep 30 15:09:12 crc kubenswrapper[4783]: I0930 15:09:12.570319 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/affa45e0-8c08-4622-a117-91dbe59491b2-host\") pod \"crc-debug-pcqgn\" (UID: \"affa45e0-8c08-4622-a117-91dbe59491b2\") " pod="openshift-must-gather-7dfk5/crc-debug-pcqgn" Sep 30 15:09:12 crc kubenswrapper[4783]: I0930 15:09:12.570413 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brmzs\" (UniqueName: \"kubernetes.io/projected/affa45e0-8c08-4622-a117-91dbe59491b2-kube-api-access-brmzs\") pod \"crc-debug-pcqgn\" (UID: \"affa45e0-8c08-4622-a117-91dbe59491b2\") " pod="openshift-must-gather-7dfk5/crc-debug-pcqgn" Sep 30 15:09:12 crc kubenswrapper[4783]: I0930 15:09:12.672064 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/affa45e0-8c08-4622-a117-91dbe59491b2-host\") pod \"crc-debug-pcqgn\" (UID: \"affa45e0-8c08-4622-a117-91dbe59491b2\") " pod="openshift-must-gather-7dfk5/crc-debug-pcqgn" Sep 30 15:09:12 crc kubenswrapper[4783]: I0930 15:09:12.672176 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brmzs\" (UniqueName: \"kubernetes.io/projected/affa45e0-8c08-4622-a117-91dbe59491b2-kube-api-access-brmzs\") pod \"crc-debug-pcqgn\" (UID: \"affa45e0-8c08-4622-a117-91dbe59491b2\") " pod="openshift-must-gather-7dfk5/crc-debug-pcqgn" Sep 30 15:09:12 crc kubenswrapper[4783]: I0930 15:09:12.672180 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/affa45e0-8c08-4622-a117-91dbe59491b2-host\") pod \"crc-debug-pcqgn\" (UID: \"affa45e0-8c08-4622-a117-91dbe59491b2\") " pod="openshift-must-gather-7dfk5/crc-debug-pcqgn" Sep 30 15:09:12 crc kubenswrapper[4783]: I0930 15:09:12.691452 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brmzs\" (UniqueName: \"kubernetes.io/projected/affa45e0-8c08-4622-a117-91dbe59491b2-kube-api-access-brmzs\") pod \"crc-debug-pcqgn\" (UID: \"affa45e0-8c08-4622-a117-91dbe59491b2\") " pod="openshift-must-gather-7dfk5/crc-debug-pcqgn" Sep 30 15:09:12 crc kubenswrapper[4783]: I0930 15:09:12.753339 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-7dfk5/crc-debug-pcqgn" Sep 30 15:09:13 crc kubenswrapper[4783]: I0930 15:09:13.183429 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7dfk5/crc-debug-pcqgn" event={"ID":"affa45e0-8c08-4622-a117-91dbe59491b2","Type":"ContainerStarted","Data":"5dffde6778baeae84234341085bffb468919851b34fbec661aa1fa5ca5696dce"} Sep 30 15:09:24 crc kubenswrapper[4783]: I0930 15:09:24.307867 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7dfk5/crc-debug-pcqgn" event={"ID":"affa45e0-8c08-4622-a117-91dbe59491b2","Type":"ContainerStarted","Data":"687251fd995a3a59b3310088e2c14925846c94d03d6edfc1aec04a2fbe58b933"} Sep 30 15:09:24 crc kubenswrapper[4783]: I0930 15:09:24.322604 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-7dfk5/crc-debug-pcqgn" podStartSLOduration=2.016637935 podStartE2EDuration="12.322588064s" podCreationTimestamp="2025-09-30 15:09:12 +0000 UTC" firstStartedPulling="2025-09-30 15:09:12.807685974 +0000 UTC m=+5652.739152281" lastFinishedPulling="2025-09-30 15:09:23.113636103 +0000 UTC m=+5663.045102410" observedRunningTime="2025-09-30 15:09:24.319416753 +0000 UTC m=+5664.250883060" watchObservedRunningTime="2025-09-30 15:09:24.322588064 +0000 UTC m=+5664.254054371" Sep 30 15:09:56 crc kubenswrapper[4783]: I0930 15:09:56.711069 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-69d4477d9-4wxtw_07392d39-a347-4741-9c38-6af20540ee65/init/0.log" Sep 30 15:09:56 crc kubenswrapper[4783]: I0930 15:09:56.897625 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-69d4477d9-4wxtw_07392d39-a347-4741-9c38-6af20540ee65/dnsmasq-dns/0.log" Sep 30 15:09:56 crc kubenswrapper[4783]: I0930 15:09:56.910083 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-69d4477d9-4wxtw_07392d39-a347-4741-9c38-6af20540ee65/init/0.log" Sep 30 15:09:57 crc kubenswrapper[4783]: I0930 15:09:57.100056 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-1499-account-create-fd5zk_b70c4ecb-466c-40e1-bf6a-dc15b77cbb21/mariadb-account-create/0.log" Sep 30 15:09:57 crc kubenswrapper[4783]: I0930 15:09:57.208066 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6d74b95c4d-2tv5q_4e00b9c3-1faa-42ab-a5ed-bd249fee9fcf/keystone-api/0.log" Sep 30 15:09:57 crc kubenswrapper[4783]: I0930 15:09:57.409980 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-bootstrap-6pgfq_7d90e695-a604-48e0-bdd9-da9ebd2b5d47/keystone-bootstrap/0.log" Sep 30 15:09:57 crc kubenswrapper[4783]: I0930 15:09:57.564257 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-db-create-fc8rm_3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa/mariadb-database-create/0.log" Sep 30 15:09:57 crc kubenswrapper[4783]: I0930 15:09:57.737473 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-db-sync-p7bgs_0e1012e0-e669-4a7a-bfc4-23729f8aacc4/keystone-db-sync/0.log" Sep 30 15:09:57 crc kubenswrapper[4783]: I0930 15:09:57.966730 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-copy-data_75ce7228-295e-4058-bb02-2750ffd56b4e/adoption/0.log" Sep 30 15:09:58 crc kubenswrapper[4783]: I0930 15:09:58.269796 4783 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-cell1-galera-0_fa616c4a-606e-4d41-968e-9e9f6b288556/mysql-bootstrap/0.log" Sep 30 15:09:58 crc kubenswrapper[4783]: I0930 15:09:58.521692 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_fa616c4a-606e-4d41-968e-9e9f6b288556/galera/0.log" Sep 30 15:09:58 crc kubenswrapper[4783]: I0930 15:09:58.573659 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_fa616c4a-606e-4d41-968e-9e9f6b288556/mysql-bootstrap/0.log" Sep 30 15:09:58 crc kubenswrapper[4783]: I0930 15:09:58.813437 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_95cad87b-982f-424b-a758-5058c04ea9db/mysql-bootstrap/0.log" Sep 30 15:09:58 crc kubenswrapper[4783]: I0930 15:09:58.969728 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_d51eac18-20ce-4903-ba61-35e4df01c2f5/memcached/0.log" Sep 30 15:09:59 crc kubenswrapper[4783]: I0930 15:09:59.055520 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_95cad87b-982f-424b-a758-5058c04ea9db/mysql-bootstrap/0.log" Sep 30 15:09:59 crc kubenswrapper[4783]: I0930 15:09:59.069062 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_95cad87b-982f-424b-a758-5058c04ea9db/galera/0.log" Sep 30 15:09:59 crc kubenswrapper[4783]: I0930 15:09:59.244887 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_d95cafd0-abef-4749-b89f-388031d454ad/openstackclient/0.log" Sep 30 15:09:59 crc kubenswrapper[4783]: I0930 15:09:59.621013 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-copy-data_fd34392e-1155-4e74-994f-5b64a49ed9cb/adoption/0.log" Sep 30 15:09:59 crc kubenswrapper[4783]: I0930 15:09:59.743107 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_05e03ae1-de6e-4542-a5db-62906d31ed81/openstack-network-exporter/0.log" Sep 30 15:09:59 crc kubenswrapper[4783]: I0930 15:09:59.840507 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_05e03ae1-de6e-4542-a5db-62906d31ed81/ovn-northd/0.log" Sep 30 15:09:59 crc kubenswrapper[4783]: I0930 15:09:59.953949 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_b67e5930-bf3a-4fb8-90f9-7a3e3446050d/openstack-network-exporter/0.log" Sep 30 15:10:00 crc kubenswrapper[4783]: I0930 15:10:00.014508 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_b67e5930-bf3a-4fb8-90f9-7a3e3446050d/ovsdbserver-nb/0.log" Sep 30 15:10:00 crc kubenswrapper[4783]: I0930 15:10:00.157456 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_bba4023a-311b-4de2-b9fd-6ea88b42f47f/openstack-network-exporter/0.log" Sep 30 15:10:00 crc kubenswrapper[4783]: I0930 15:10:00.181369 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_bba4023a-311b-4de2-b9fd-6ea88b42f47f/ovsdbserver-nb/0.log" Sep 30 15:10:00 crc kubenswrapper[4783]: I0930 15:10:00.315909 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_2476cec9-1ee6-438b-9ccf-7a3fa57d474a/openstack-network-exporter/0.log" Sep 30 15:10:00 crc kubenswrapper[4783]: I0930 15:10:00.430119 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_2476cec9-1ee6-438b-9ccf-7a3fa57d474a/ovsdbserver-nb/0.log" Sep 
30 15:10:00 crc kubenswrapper[4783]: I0930 15:10:00.523093 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748/ovsdbserver-sb/0.log" Sep 30 15:10:00 crc kubenswrapper[4783]: I0930 15:10:00.553351 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_a0a8f3d9-de5d-4eb4-a8b8-4a85fea71748/openstack-network-exporter/0.log" Sep 30 15:10:00 crc kubenswrapper[4783]: I0930 15:10:00.631643 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_315aa6a3-f75b-41b5-9aef-f1d4b659ebab/openstack-network-exporter/0.log" Sep 30 15:10:00 crc kubenswrapper[4783]: I0930 15:10:00.713696 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_315aa6a3-f75b-41b5-9aef-f1d4b659ebab/ovsdbserver-sb/0.log" Sep 30 15:10:00 crc kubenswrapper[4783]: I0930 15:10:00.820023 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_1a586697-ba7d-4413-8340-e9cbd3ea7424/openstack-network-exporter/0.log" Sep 30 15:10:00 crc kubenswrapper[4783]: I0930 15:10:00.929368 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_1a586697-ba7d-4413-8340-e9cbd3ea7424/ovsdbserver-sb/0.log" Sep 30 15:10:01 crc kubenswrapper[4783]: I0930 15:10:01.036005 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e175dc5c-270b-46c8-b5cb-d95fab2b9a92/setup-container/0.log" Sep 30 15:10:01 crc kubenswrapper[4783]: I0930 15:10:01.231453 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e175dc5c-270b-46c8-b5cb-d95fab2b9a92/setup-container/0.log" Sep 30 15:10:01 crc kubenswrapper[4783]: I0930 15:10:01.249412 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ed3bb681-a6c6-4336-8664-36e153896c36/setup-container/0.log" Sep 30 15:10:01 crc kubenswrapper[4783]: I0930 15:10:01.254020 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e175dc5c-270b-46c8-b5cb-d95fab2b9a92/rabbitmq/0.log" Sep 30 15:10:01 crc kubenswrapper[4783]: I0930 15:10:01.465025 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ed3bb681-a6c6-4336-8664-36e153896c36/rabbitmq/0.log" Sep 30 15:10:01 crc kubenswrapper[4783]: I0930 15:10:01.487082 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ed3bb681-a6c6-4336-8664-36e153896c36/setup-container/0.log" Sep 30 15:10:47 crc kubenswrapper[4783]: I0930 15:10:47.027723 4783 generic.go:334] "Generic (PLEG): container finished" podID="affa45e0-8c08-4622-a117-91dbe59491b2" containerID="687251fd995a3a59b3310088e2c14925846c94d03d6edfc1aec04a2fbe58b933" exitCode=0 Sep 30 15:10:47 crc kubenswrapper[4783]: I0930 15:10:47.027989 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7dfk5/crc-debug-pcqgn" event={"ID":"affa45e0-8c08-4622-a117-91dbe59491b2","Type":"ContainerDied","Data":"687251fd995a3a59b3310088e2c14925846c94d03d6edfc1aec04a2fbe58b933"} Sep 30 15:10:48 crc kubenswrapper[4783]: I0930 15:10:48.136955 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-7dfk5/crc-debug-pcqgn" Sep 30 15:10:48 crc kubenswrapper[4783]: I0930 15:10:48.172125 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-7dfk5/crc-debug-pcqgn"] Sep 30 15:10:48 crc kubenswrapper[4783]: I0930 15:10:48.179380 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-7dfk5/crc-debug-pcqgn"] Sep 30 15:10:48 crc kubenswrapper[4783]: I0930 15:10:48.213876 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/affa45e0-8c08-4622-a117-91dbe59491b2-host\") pod \"affa45e0-8c08-4622-a117-91dbe59491b2\" (UID: \"affa45e0-8c08-4622-a117-91dbe59491b2\") " Sep 30 15:10:48 crc kubenswrapper[4783]: I0930 15:10:48.213948 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brmzs\" (UniqueName: \"kubernetes.io/projected/affa45e0-8c08-4622-a117-91dbe59491b2-kube-api-access-brmzs\") pod \"affa45e0-8c08-4622-a117-91dbe59491b2\" (UID: \"affa45e0-8c08-4622-a117-91dbe59491b2\") " Sep 30 15:10:48 crc kubenswrapper[4783]: I0930 15:10:48.214023 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/affa45e0-8c08-4622-a117-91dbe59491b2-host" (OuterVolumeSpecName: "host") pod "affa45e0-8c08-4622-a117-91dbe59491b2" (UID: "affa45e0-8c08-4622-a117-91dbe59491b2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 15:10:48 crc kubenswrapper[4783]: I0930 15:10:48.214336 4783 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/affa45e0-8c08-4622-a117-91dbe59491b2-host\") on node \"crc\" DevicePath \"\"" Sep 30 15:10:48 crc kubenswrapper[4783]: I0930 15:10:48.220498 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/affa45e0-8c08-4622-a117-91dbe59491b2-kube-api-access-brmzs" (OuterVolumeSpecName: "kube-api-access-brmzs") pod "affa45e0-8c08-4622-a117-91dbe59491b2" (UID: "affa45e0-8c08-4622-a117-91dbe59491b2"). InnerVolumeSpecName "kube-api-access-brmzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:10:48 crc kubenswrapper[4783]: I0930 15:10:48.315161 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brmzs\" (UniqueName: \"kubernetes.io/projected/affa45e0-8c08-4622-a117-91dbe59491b2-kube-api-access-brmzs\") on node \"crc\" DevicePath \"\"" Sep 30 15:10:48 crc kubenswrapper[4783]: I0930 15:10:48.853346 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="affa45e0-8c08-4622-a117-91dbe59491b2" path="/var/lib/kubelet/pods/affa45e0-8c08-4622-a117-91dbe59491b2/volumes" Sep 30 15:10:49 crc kubenswrapper[4783]: I0930 15:10:49.048558 4783 scope.go:117] "RemoveContainer" containerID="687251fd995a3a59b3310088e2c14925846c94d03d6edfc1aec04a2fbe58b933" Sep 30 15:10:49 crc kubenswrapper[4783]: I0930 15:10:49.048891 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-7dfk5/crc-debug-pcqgn" Sep 30 15:10:49 crc kubenswrapper[4783]: I0930 15:10:49.344981 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-7dfk5/crc-debug-kfg4m"] Sep 30 15:10:49 crc kubenswrapper[4783]: E0930 15:10:49.345657 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="affa45e0-8c08-4622-a117-91dbe59491b2" containerName="container-00" Sep 30 15:10:49 crc kubenswrapper[4783]: I0930 15:10:49.345670 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="affa45e0-8c08-4622-a117-91dbe59491b2" containerName="container-00" Sep 30 15:10:49 crc kubenswrapper[4783]: I0930 15:10:49.345862 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="affa45e0-8c08-4622-a117-91dbe59491b2" containerName="container-00" Sep 30 15:10:49 crc kubenswrapper[4783]: I0930 15:10:49.346383 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7dfk5/crc-debug-kfg4m" Sep 30 15:10:49 crc kubenswrapper[4783]: I0930 15:10:49.534280 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1eed8e23-0cb4-4c1c-9b49-137f20acbba4-host\") pod \"crc-debug-kfg4m\" (UID: \"1eed8e23-0cb4-4c1c-9b49-137f20acbba4\") " pod="openshift-must-gather-7dfk5/crc-debug-kfg4m" Sep 30 15:10:49 crc kubenswrapper[4783]: I0930 15:10:49.534938 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzf59\" (UniqueName: \"kubernetes.io/projected/1eed8e23-0cb4-4c1c-9b49-137f20acbba4-kube-api-access-wzf59\") pod \"crc-debug-kfg4m\" (UID: \"1eed8e23-0cb4-4c1c-9b49-137f20acbba4\") " pod="openshift-must-gather-7dfk5/crc-debug-kfg4m" Sep 30 15:10:49 crc kubenswrapper[4783]: I0930 15:10:49.636512 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1eed8e23-0cb4-4c1c-9b49-137f20acbba4-host\") pod \"crc-debug-kfg4m\" (UID: \"1eed8e23-0cb4-4c1c-9b49-137f20acbba4\") " pod="openshift-must-gather-7dfk5/crc-debug-kfg4m" Sep 30 15:10:49 crc kubenswrapper[4783]: I0930 15:10:49.636735 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzf59\" (UniqueName: \"kubernetes.io/projected/1eed8e23-0cb4-4c1c-9b49-137f20acbba4-kube-api-access-wzf59\") pod \"crc-debug-kfg4m\" (UID: \"1eed8e23-0cb4-4c1c-9b49-137f20acbba4\") " pod="openshift-must-gather-7dfk5/crc-debug-kfg4m" Sep 30 15:10:49 crc kubenswrapper[4783]: I0930 15:10:49.636647 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1eed8e23-0cb4-4c1c-9b49-137f20acbba4-host\") pod \"crc-debug-kfg4m\" (UID: \"1eed8e23-0cb4-4c1c-9b49-137f20acbba4\") " pod="openshift-must-gather-7dfk5/crc-debug-kfg4m" Sep 30 15:10:49 crc kubenswrapper[4783]: I0930 15:10:49.675321 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzf59\" (UniqueName: \"kubernetes.io/projected/1eed8e23-0cb4-4c1c-9b49-137f20acbba4-kube-api-access-wzf59\") pod \"crc-debug-kfg4m\" (UID: \"1eed8e23-0cb4-4c1c-9b49-137f20acbba4\") " pod="openshift-must-gather-7dfk5/crc-debug-kfg4m" Sep 30 15:10:49 crc kubenswrapper[4783]: I0930 15:10:49.963586 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-7dfk5/crc-debug-kfg4m" Sep 30 15:10:49 crc kubenswrapper[4783]: W0930 15:10:49.998539 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1eed8e23_0cb4_4c1c_9b49_137f20acbba4.slice/crio-1ed023ec6a6884a8093ad706c0b68d51c9a06541858b6a6e6c8b065438afe484 WatchSource:0}: Error finding container 1ed023ec6a6884a8093ad706c0b68d51c9a06541858b6a6e6c8b065438afe484: Status 404 returned error can't find the container with id 1ed023ec6a6884a8093ad706c0b68d51c9a06541858b6a6e6c8b065438afe484 Sep 30 15:10:50 crc kubenswrapper[4783]: I0930 15:10:50.064761 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7dfk5/crc-debug-kfg4m" event={"ID":"1eed8e23-0cb4-4c1c-9b49-137f20acbba4","Type":"ContainerStarted","Data":"1ed023ec6a6884a8093ad706c0b68d51c9a06541858b6a6e6c8b065438afe484"} Sep 30 15:10:51 crc kubenswrapper[4783]: I0930 15:10:51.080572 4783 generic.go:334] "Generic (PLEG): container finished" podID="1eed8e23-0cb4-4c1c-9b49-137f20acbba4" containerID="427c3e1e2338e5292ce7cb8c947b373b2330e914d7db969ad76ec1bbc1a5e33a" exitCode=0 Sep 30 15:10:51 crc kubenswrapper[4783]: I0930 15:10:51.080693 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7dfk5/crc-debug-kfg4m" event={"ID":"1eed8e23-0cb4-4c1c-9b49-137f20acbba4","Type":"ContainerDied","Data":"427c3e1e2338e5292ce7cb8c947b373b2330e914d7db969ad76ec1bbc1a5e33a"} Sep 30 15:10:52 crc kubenswrapper[4783]: I0930 15:10:52.173030 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7dfk5/crc-debug-kfg4m" Sep 30 15:10:52 crc kubenswrapper[4783]: I0930 15:10:52.280333 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1eed8e23-0cb4-4c1c-9b49-137f20acbba4-host\") pod \"1eed8e23-0cb4-4c1c-9b49-137f20acbba4\" (UID: \"1eed8e23-0cb4-4c1c-9b49-137f20acbba4\") " Sep 30 15:10:52 crc kubenswrapper[4783]: I0930 15:10:52.280456 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzf59\" (UniqueName: \"kubernetes.io/projected/1eed8e23-0cb4-4c1c-9b49-137f20acbba4-kube-api-access-wzf59\") pod \"1eed8e23-0cb4-4c1c-9b49-137f20acbba4\" (UID: \"1eed8e23-0cb4-4c1c-9b49-137f20acbba4\") " Sep 30 15:10:52 crc kubenswrapper[4783]: I0930 15:10:52.280476 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1eed8e23-0cb4-4c1c-9b49-137f20acbba4-host" (OuterVolumeSpecName: "host") pod "1eed8e23-0cb4-4c1c-9b49-137f20acbba4" (UID: "1eed8e23-0cb4-4c1c-9b49-137f20acbba4"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 15:10:52 crc kubenswrapper[4783]: I0930 15:10:52.280912 4783 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1eed8e23-0cb4-4c1c-9b49-137f20acbba4-host\") on node \"crc\" DevicePath \"\"" Sep 30 15:10:52 crc kubenswrapper[4783]: I0930 15:10:52.290194 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1eed8e23-0cb4-4c1c-9b49-137f20acbba4-kube-api-access-wzf59" (OuterVolumeSpecName: "kube-api-access-wzf59") pod "1eed8e23-0cb4-4c1c-9b49-137f20acbba4" (UID: "1eed8e23-0cb4-4c1c-9b49-137f20acbba4"). InnerVolumeSpecName "kube-api-access-wzf59". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:10:52 crc kubenswrapper[4783]: I0930 15:10:52.382069 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzf59\" (UniqueName: \"kubernetes.io/projected/1eed8e23-0cb4-4c1c-9b49-137f20acbba4-kube-api-access-wzf59\") on node \"crc\" DevicePath \"\"" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.097470 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7dfk5/crc-debug-kfg4m" event={"ID":"1eed8e23-0cb4-4c1c-9b49-137f20acbba4","Type":"ContainerDied","Data":"1ed023ec6a6884a8093ad706c0b68d51c9a06541858b6a6e6c8b065438afe484"} Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.097722 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ed023ec6a6884a8093ad706c0b68d51c9a06541858b6a6e6c8b065438afe484" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.097861 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7dfk5/crc-debug-kfg4m" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.220726 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mhk26"] Sep 30 15:10:53 crc kubenswrapper[4783]: E0930 15:10:53.221070 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eed8e23-0cb4-4c1c-9b49-137f20acbba4" containerName="container-00" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.221082 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eed8e23-0cb4-4c1c-9b49-137f20acbba4" containerName="container-00" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.221280 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eed8e23-0cb4-4c1c-9b49-137f20acbba4" containerName="container-00" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.222688 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.237186 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhk26"] Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.403467 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-catalog-content\") pod \"redhat-marketplace-mhk26\" (UID: \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\") " pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.403560 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dp2tw\" (UniqueName: \"kubernetes.io/projected/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-kube-api-access-dp2tw\") pod \"redhat-marketplace-mhk26\" (UID: \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\") " pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.403605 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-utilities\") pod \"redhat-marketplace-mhk26\" (UID: \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\") " pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.504828 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-utilities\") pod \"redhat-marketplace-mhk26\" (UID: \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\") " pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.504993 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-catalog-content\") pod \"redhat-marketplace-mhk26\" (UID: \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\") " pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.505418 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-catalog-content\") pod \"redhat-marketplace-mhk26\" (UID: \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\") " pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.505485 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-utilities\") pod \"redhat-marketplace-mhk26\" (UID: \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\") " pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.505893 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dp2tw\" (UniqueName: \"kubernetes.io/projected/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-kube-api-access-dp2tw\") pod \"redhat-marketplace-mhk26\" (UID: \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\") " pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.531201 4783 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-dp2tw\" (UniqueName: \"kubernetes.io/projected/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-kube-api-access-dp2tw\") pod \"redhat-marketplace-mhk26\" (UID: \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\") " pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:10:53 crc kubenswrapper[4783]: I0930 15:10:53.555787 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:10:54 crc kubenswrapper[4783]: I0930 15:10:54.064910 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhk26"] Sep 30 15:10:54 crc kubenswrapper[4783]: I0930 15:10:54.107481 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhk26" event={"ID":"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a","Type":"ContainerStarted","Data":"778e565a13afe60b1fb7ef86d02ea91857c1de6f51b48381281dd12b5f1d6ca4"} Sep 30 15:10:55 crc kubenswrapper[4783]: I0930 15:10:55.116255 4783 generic.go:334] "Generic (PLEG): container finished" podID="fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" containerID="c7872bcc7a7d062ee0d48a383c27ae6a345b5cd7ae5ff1b4b04dc2e3dde3123e" exitCode=0 Sep 30 15:10:55 crc kubenswrapper[4783]: I0930 15:10:55.116318 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhk26" event={"ID":"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a","Type":"ContainerDied","Data":"c7872bcc7a7d062ee0d48a383c27ae6a345b5cd7ae5ff1b4b04dc2e3dde3123e"} Sep 30 15:10:55 crc kubenswrapper[4783]: I0930 15:10:55.117944 4783 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 15:10:56 crc kubenswrapper[4783]: I0930 15:10:56.128638 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhk26" event={"ID":"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a","Type":"ContainerStarted","Data":"6c8ac09a63fe35318132941bc844031deffba87bd48aba4ccc3a12b339a625a6"} Sep 30 15:10:57 crc kubenswrapper[4783]: I0930 15:10:57.136737 4783 generic.go:334] "Generic (PLEG): container finished" podID="fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" containerID="6c8ac09a63fe35318132941bc844031deffba87bd48aba4ccc3a12b339a625a6" exitCode=0 Sep 30 15:10:57 crc kubenswrapper[4783]: I0930 15:10:57.137062 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhk26" event={"ID":"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a","Type":"ContainerDied","Data":"6c8ac09a63fe35318132941bc844031deffba87bd48aba4ccc3a12b339a625a6"} Sep 30 15:10:58 crc kubenswrapper[4783]: I0930 15:10:58.149888 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhk26" event={"ID":"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a","Type":"ContainerStarted","Data":"1b8ba837f1b0b60d47d75054b0f4d9dff4d89a4ce1c41630c17bb7aa85823245"} Sep 30 15:10:58 crc kubenswrapper[4783]: I0930 15:10:58.177247 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mhk26" podStartSLOduration=2.647802814 podStartE2EDuration="5.177197294s" podCreationTimestamp="2025-09-30 15:10:53 +0000 UTC" firstStartedPulling="2025-09-30 15:10:55.117723057 +0000 UTC m=+5755.049189354" lastFinishedPulling="2025-09-30 15:10:57.647117527 +0000 UTC m=+5757.578583834" observedRunningTime="2025-09-30 15:10:58.167400091 +0000 UTC m=+5758.098866408" watchObservedRunningTime="2025-09-30 15:10:58.177197294 +0000 UTC 
m=+5758.108663601" Sep 30 15:10:58 crc kubenswrapper[4783]: I0930 15:10:58.322954 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-7dfk5/crc-debug-kfg4m"] Sep 30 15:10:58 crc kubenswrapper[4783]: I0930 15:10:58.329186 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-7dfk5/crc-debug-kfg4m"] Sep 30 15:10:58 crc kubenswrapper[4783]: I0930 15:10:58.863398 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1eed8e23-0cb4-4c1c-9b49-137f20acbba4" path="/var/lib/kubelet/pods/1eed8e23-0cb4-4c1c-9b49-137f20acbba4/volumes" Sep 30 15:10:59 crc kubenswrapper[4783]: I0930 15:10:59.487978 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-7dfk5/crc-debug-lqxkz"] Sep 30 15:10:59 crc kubenswrapper[4783]: I0930 15:10:59.489288 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7dfk5/crc-debug-lqxkz" Sep 30 15:10:59 crc kubenswrapper[4783]: I0930 15:10:59.601873 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d9680f23-d0df-48e7-81da-96239176c736-host\") pod \"crc-debug-lqxkz\" (UID: \"d9680f23-d0df-48e7-81da-96239176c736\") " pod="openshift-must-gather-7dfk5/crc-debug-lqxkz" Sep 30 15:10:59 crc kubenswrapper[4783]: I0930 15:10:59.601919 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v94n8\" (UniqueName: \"kubernetes.io/projected/d9680f23-d0df-48e7-81da-96239176c736-kube-api-access-v94n8\") pod \"crc-debug-lqxkz\" (UID: \"d9680f23-d0df-48e7-81da-96239176c736\") " pod="openshift-must-gather-7dfk5/crc-debug-lqxkz" Sep 30 15:10:59 crc kubenswrapper[4783]: I0930 15:10:59.703489 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d9680f23-d0df-48e7-81da-96239176c736-host\") pod \"crc-debug-lqxkz\" (UID: \"d9680f23-d0df-48e7-81da-96239176c736\") " pod="openshift-must-gather-7dfk5/crc-debug-lqxkz" Sep 30 15:10:59 crc kubenswrapper[4783]: I0930 15:10:59.703543 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v94n8\" (UniqueName: \"kubernetes.io/projected/d9680f23-d0df-48e7-81da-96239176c736-kube-api-access-v94n8\") pod \"crc-debug-lqxkz\" (UID: \"d9680f23-d0df-48e7-81da-96239176c736\") " pod="openshift-must-gather-7dfk5/crc-debug-lqxkz" Sep 30 15:10:59 crc kubenswrapper[4783]: I0930 15:10:59.703656 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d9680f23-d0df-48e7-81da-96239176c736-host\") pod \"crc-debug-lqxkz\" (UID: \"d9680f23-d0df-48e7-81da-96239176c736\") " pod="openshift-must-gather-7dfk5/crc-debug-lqxkz" Sep 30 15:10:59 crc kubenswrapper[4783]: I0930 15:10:59.727959 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v94n8\" (UniqueName: \"kubernetes.io/projected/d9680f23-d0df-48e7-81da-96239176c736-kube-api-access-v94n8\") pod \"crc-debug-lqxkz\" (UID: \"d9680f23-d0df-48e7-81da-96239176c736\") " pod="openshift-must-gather-7dfk5/crc-debug-lqxkz" Sep 30 15:10:59 crc kubenswrapper[4783]: I0930 15:10:59.810957 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-7dfk5/crc-debug-lqxkz" Sep 30 15:10:59 crc kubenswrapper[4783]: W0930 15:10:59.840480 4783 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9680f23_d0df_48e7_81da_96239176c736.slice/crio-2ee3cae0d7b172c8aa01df30214ed4e7a43b8ed39fa14bcfb1ebfd132c23f8a8 WatchSource:0}: Error finding container 2ee3cae0d7b172c8aa01df30214ed4e7a43b8ed39fa14bcfb1ebfd132c23f8a8: Status 404 returned error can't find the container with id 2ee3cae0d7b172c8aa01df30214ed4e7a43b8ed39fa14bcfb1ebfd132c23f8a8 Sep 30 15:11:00 crc kubenswrapper[4783]: I0930 15:11:00.169991 4783 generic.go:334] "Generic (PLEG): container finished" podID="d9680f23-d0df-48e7-81da-96239176c736" containerID="cbb8d34d172b25b7e8b16e3531ed6971560bc7c17cecef3c0077a96bd8beffc2" exitCode=0 Sep 30 15:11:00 crc kubenswrapper[4783]: I0930 15:11:00.170103 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7dfk5/crc-debug-lqxkz" event={"ID":"d9680f23-d0df-48e7-81da-96239176c736","Type":"ContainerDied","Data":"cbb8d34d172b25b7e8b16e3531ed6971560bc7c17cecef3c0077a96bd8beffc2"} Sep 30 15:11:00 crc kubenswrapper[4783]: I0930 15:11:00.170375 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7dfk5/crc-debug-lqxkz" event={"ID":"d9680f23-d0df-48e7-81da-96239176c736","Type":"ContainerStarted","Data":"2ee3cae0d7b172c8aa01df30214ed4e7a43b8ed39fa14bcfb1ebfd132c23f8a8"} Sep 30 15:11:00 crc kubenswrapper[4783]: I0930 15:11:00.216166 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-7dfk5/crc-debug-lqxkz"] Sep 30 15:11:00 crc kubenswrapper[4783]: I0930 15:11:00.221939 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-7dfk5/crc-debug-lqxkz"] Sep 30 15:11:01 crc kubenswrapper[4783]: I0930 15:11:01.262807 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-7dfk5/crc-debug-lqxkz" Sep 30 15:11:01 crc kubenswrapper[4783]: I0930 15:11:01.429597 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d9680f23-d0df-48e7-81da-96239176c736-host\") pod \"d9680f23-d0df-48e7-81da-96239176c736\" (UID: \"d9680f23-d0df-48e7-81da-96239176c736\") " Sep 30 15:11:01 crc kubenswrapper[4783]: I0930 15:11:01.429656 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v94n8\" (UniqueName: \"kubernetes.io/projected/d9680f23-d0df-48e7-81da-96239176c736-kube-api-access-v94n8\") pod \"d9680f23-d0df-48e7-81da-96239176c736\" (UID: \"d9680f23-d0df-48e7-81da-96239176c736\") " Sep 30 15:11:01 crc kubenswrapper[4783]: I0930 15:11:01.429730 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d9680f23-d0df-48e7-81da-96239176c736-host" (OuterVolumeSpecName: "host") pod "d9680f23-d0df-48e7-81da-96239176c736" (UID: "d9680f23-d0df-48e7-81da-96239176c736"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 15:11:01 crc kubenswrapper[4783]: I0930 15:11:01.430166 4783 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d9680f23-d0df-48e7-81da-96239176c736-host\") on node \"crc\" DevicePath \"\"" Sep 30 15:11:01 crc kubenswrapper[4783]: I0930 15:11:01.436709 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9680f23-d0df-48e7-81da-96239176c736-kube-api-access-v94n8" (OuterVolumeSpecName: "kube-api-access-v94n8") pod "d9680f23-d0df-48e7-81da-96239176c736" (UID: "d9680f23-d0df-48e7-81da-96239176c736"). InnerVolumeSpecName "kube-api-access-v94n8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:11:01 crc kubenswrapper[4783]: I0930 15:11:01.532074 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v94n8\" (UniqueName: \"kubernetes.io/projected/d9680f23-d0df-48e7-81da-96239176c736-kube-api-access-v94n8\") on node \"crc\" DevicePath \"\"" Sep 30 15:11:01 crc kubenswrapper[4783]: I0930 15:11:01.761777 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg_bb7e3b6e-d9cb-4a1b-a5be-44612248b60a/util/0.log" Sep 30 15:11:01 crc kubenswrapper[4783]: I0930 15:11:01.893113 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg_bb7e3b6e-d9cb-4a1b-a5be-44612248b60a/util/0.log" Sep 30 15:11:01 crc kubenswrapper[4783]: I0930 15:11:01.899089 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg_bb7e3b6e-d9cb-4a1b-a5be-44612248b60a/pull/0.log" Sep 30 15:11:01 crc kubenswrapper[4783]: I0930 15:11:01.955618 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg_bb7e3b6e-d9cb-4a1b-a5be-44612248b60a/pull/0.log" Sep 30 15:11:02 crc kubenswrapper[4783]: I0930 15:11:02.108745 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg_bb7e3b6e-d9cb-4a1b-a5be-44612248b60a/pull/0.log" Sep 30 15:11:02 crc kubenswrapper[4783]: I0930 15:11:02.110244 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg_bb7e3b6e-d9cb-4a1b-a5be-44612248b60a/util/0.log" Sep 30 15:11:02 crc kubenswrapper[4783]: I0930 15:11:02.141922 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5ffb62280253d85b2af235b10999d22be8e5524c9accad58ab6091a4694hrwg_bb7e3b6e-d9cb-4a1b-a5be-44612248b60a/extract/0.log" Sep 30 15:11:02 crc kubenswrapper[4783]: I0930 15:11:02.188496 4783 scope.go:117] "RemoveContainer" containerID="cbb8d34d172b25b7e8b16e3531ed6971560bc7c17cecef3c0077a96bd8beffc2" Sep 30 15:11:02 crc kubenswrapper[4783]: I0930 15:11:02.188803 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-7dfk5/crc-debug-lqxkz" Sep 30 15:11:02 crc kubenswrapper[4783]: I0930 15:11:02.276398 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-f7f98cb69-bd2g9_3b26411c-5b67-4660-9994-0500516afb9e/kube-rbac-proxy/0.log" Sep 30 15:11:02 crc kubenswrapper[4783]: I0930 15:11:02.367813 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-f7f98cb69-bd2g9_3b26411c-5b67-4660-9994-0500516afb9e/manager/0.log" Sep 30 15:11:02 crc kubenswrapper[4783]: I0930 15:11:02.440103 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859cd486d-npt95_a3064510-3c7e-4094-a3b6-ae572fba1a95/kube-rbac-proxy/0.log" Sep 30 15:11:02 crc kubenswrapper[4783]: I0930 15:11:02.479572 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859cd486d-npt95_a3064510-3c7e-4094-a3b6-ae572fba1a95/manager/0.log" Sep 30 15:11:02 crc kubenswrapper[4783]: I0930 15:11:02.584768 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-77fb7bcf5b-5x5rg_0c47e365-48fb-43e8-9932-04850ec2344d/kube-rbac-proxy/0.log" Sep 30 15:11:02 crc kubenswrapper[4783]: I0930 15:11:02.730430 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-77fb7bcf5b-5x5rg_0c47e365-48fb-43e8-9932-04850ec2344d/manager/0.log" Sep 30 15:11:02 crc kubenswrapper[4783]: I0930 15:11:02.854090 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9680f23-d0df-48e7-81da-96239176c736" path="/var/lib/kubelet/pods/d9680f23-d0df-48e7-81da-96239176c736/volumes" Sep 30 15:11:02 crc kubenswrapper[4783]: I0930 15:11:02.883556 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8bc4775b5-d7vtg_30aabf9c-e0d4-44ac-ae33-b5f7784941ce/kube-rbac-proxy/0.log" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.023294 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8bc4775b5-d7vtg_30aabf9c-e0d4-44ac-ae33-b5f7784941ce/manager/0.log" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.075617 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5b4fc86755-8qnst_60247f30-05f5-49e6-81f8-7a91203afa8e/manager/0.log" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.079904 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5b4fc86755-8qnst_60247f30-05f5-49e6-81f8-7a91203afa8e/kube-rbac-proxy/0.log" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.216676 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-679b4759bb-l2n7h_58b3d85e-a497-4e26-98ab-89101226c62a/kube-rbac-proxy/0.log" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.246906 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-679b4759bb-l2n7h_58b3d85e-a497-4e26-98ab-89101226c62a/manager/0.log" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.303671 4783 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d9c7d9477-z27gn_d6159fd2-85d7-4d0b-8c24-042468d2cff3/kube-rbac-proxy/0.log" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.440816 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6f589bc7f7-t2dkk_3b82a357-f2a6-42e3-8d29-88368c0a3e43/kube-rbac-proxy/0.log" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.520541 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6f589bc7f7-t2dkk_3b82a357-f2a6-42e3-8d29-88368c0a3e43/manager/0.log" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.541544 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d9c7d9477-z27gn_d6159fd2-85d7-4d0b-8c24-042468d2cff3/manager/0.log" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.556811 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.556852 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.615408 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.735570 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-59d7dc95cf-prn5v_fcc8d2d0-1c61-4f4a-95a4-1ff2f80eb7fa/kube-rbac-proxy/0.log" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.801291 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-59d7dc95cf-prn5v_fcc8d2d0-1c61-4f4a-95a4-1ff2f80eb7fa/manager/0.log" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.859162 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-b7cf8cb5f-mvkjc_58ca2541-d4dc-4a6f-9e5f-0ad539e65808/kube-rbac-proxy/0.log" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.933141 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-b7cf8cb5f-mvkjc_58ca2541-d4dc-4a6f-9e5f-0ad539e65808/manager/0.log" Sep 30 15:11:03 crc kubenswrapper[4783]: I0930 15:11:03.993892 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf5bb885-vd9cw_26213271-94ee-4549-99d6-b30ba62e00fc/kube-rbac-proxy/0.log" Sep 30 15:11:04 crc kubenswrapper[4783]: I0930 15:11:04.050784 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf5bb885-vd9cw_26213271-94ee-4549-99d6-b30ba62e00fc/manager/0.log" Sep 30 15:11:04 crc kubenswrapper[4783]: I0930 15:11:04.183941 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6b96467f46-8l52m_52109482-0b58-40de-a483-cfa4b8e33eee/kube-rbac-proxy/0.log" Sep 30 15:11:04 crc kubenswrapper[4783]: I0930 15:11:04.253870 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:11:04 crc kubenswrapper[4783]: I0930 15:11:04.267423 4783 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6b96467f46-8l52m_52109482-0b58-40de-a483-cfa4b8e33eee/manager/0.log" Sep 30 15:11:04 crc kubenswrapper[4783]: I0930 15:11:04.303270 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhk26"] Sep 30 15:11:04 crc kubenswrapper[4783]: I0930 15:11:04.346334 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79f9fc9fd8-lpmr4_53bff1a7-c605-4e9e-8311-9157240d03b4/kube-rbac-proxy/0.log" Sep 30 15:11:04 crc kubenswrapper[4783]: I0930 15:11:04.516961 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79f9fc9fd8-lpmr4_53bff1a7-c605-4e9e-8311-9157240d03b4/manager/0.log" Sep 30 15:11:04 crc kubenswrapper[4783]: I0930 15:11:04.534888 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6fb7d6b8bf-h9b4s_c9f201b9-c0fe-4a29-bcad-6ae49742b3ff/kube-rbac-proxy/0.log" Sep 30 15:11:04 crc kubenswrapper[4783]: I0930 15:11:04.538723 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6fb7d6b8bf-h9b4s_c9f201b9-c0fe-4a29-bcad-6ae49742b3ff/manager/0.log" Sep 30 15:11:04 crc kubenswrapper[4783]: I0930 15:11:04.688709 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq_ade21fb8-be1e-4bdd-a8a7-16c6d2124570/manager/0.log" Sep 30 15:11:04 crc kubenswrapper[4783]: I0930 15:11:04.689066 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-86b7cb4c5fr8mlq_ade21fb8-be1e-4bdd-a8a7-16c6d2124570/kube-rbac-proxy/0.log" Sep 30 15:11:04 crc kubenswrapper[4783]: I0930 15:11:04.896922 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7b7bb8bd67-qzlt6_45485bf9-44cc-4f01-a72e-85f9efc22357/kube-rbac-proxy/0.log" Sep 30 15:11:04 crc kubenswrapper[4783]: I0930 15:11:04.935836 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-56dc567787-l5qvt_2939c11a-b620-4766-8e26-97ef66a8f4ad/kube-rbac-proxy/0.log" Sep 30 15:11:05 crc kubenswrapper[4783]: I0930 15:11:05.259521 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-56dc567787-l5qvt_2939c11a-b620-4766-8e26-97ef66a8f4ad/operator/0.log" Sep 30 15:11:05 crc kubenswrapper[4783]: I0930 15:11:05.339507 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-9c5wp_e71f47e2-3ec5-4c17-b78b-c2965f54fb22/registry-server/0.log" Sep 30 15:11:05 crc kubenswrapper[4783]: I0930 15:11:05.441524 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-84c745747f-59mrh_8c02b85f-2ca3-48a6-86ad-01ce12288522/kube-rbac-proxy/0.log" Sep 30 15:11:05 crc kubenswrapper[4783]: I0930 15:11:05.572389 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-598c4c8547-q6vmx_f2e6ebfe-0822-4070-b60c-64974705fd4e/kube-rbac-proxy/0.log" Sep 30 15:11:05 crc kubenswrapper[4783]: I0930 15:11:05.578610 4783 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-84c745747f-59mrh_8c02b85f-2ca3-48a6-86ad-01ce12288522/manager/0.log" Sep 30 15:11:05 crc kubenswrapper[4783]: I0930 15:11:05.716532 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-598c4c8547-q6vmx_f2e6ebfe-0822-4070-b60c-64974705fd4e/manager/0.log" Sep 30 15:11:05 crc kubenswrapper[4783]: I0930 15:11:05.816413 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-mms9t_987e3b87-31ee-4a44-8829-61e239d3945a/operator/0.log" Sep 30 15:11:05 crc kubenswrapper[4783]: I0930 15:11:05.967260 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7b7bb8bd67-qzlt6_45485bf9-44cc-4f01-a72e-85f9efc22357/manager/0.log" Sep 30 15:11:05 crc kubenswrapper[4783]: I0930 15:11:05.969243 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-657c6b68c7-56wct_c1065d45-bf09-456a-af5c-23f62fb0780c/kube-rbac-proxy/0.log" Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.035632 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-657c6b68c7-56wct_c1065d45-bf09-456a-af5c-23f62fb0780c/manager/0.log" Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.061107 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-cb66d6b59-ml9lk_137c2cf3-bda8-4da6-a9c1-d430e636c745/kube-rbac-proxy/0.log" Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.189446 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-6bb97fcf96-nk8b4_719cc658-fa4f-4ae9-878a-47e8fdc9c2cc/kube-rbac-proxy/0.log" Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.194832 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-cb66d6b59-ml9lk_137c2cf3-bda8-4da6-a9c1-d430e636c745/manager/0.log" Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.218888 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mhk26" podUID="fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" containerName="registry-server" containerID="cri-o://1b8ba837f1b0b60d47d75054b0f4d9dff4d89a4ce1c41630c17bb7aa85823245" gracePeriod=2 Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.220112 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-6bb97fcf96-nk8b4_719cc658-fa4f-4ae9-878a-47e8fdc9c2cc/manager/0.log" Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.333053 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75756dd4d9-ntrpm_d964b590-5ba0-4c6b-bee8-3c52f4950d9f/kube-rbac-proxy/0.log" Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.384593 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75756dd4d9-ntrpm_d964b590-5ba0-4c6b-bee8-3c52f4950d9f/manager/0.log" Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.640466 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.813398 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dp2tw\" (UniqueName: \"kubernetes.io/projected/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-kube-api-access-dp2tw\") pod \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\" (UID: \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\") " Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.813462 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-utilities\") pod \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\" (UID: \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\") " Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.813712 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-catalog-content\") pod \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\" (UID: \"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a\") " Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.814516 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-utilities" (OuterVolumeSpecName: "utilities") pod "fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" (UID: "fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.826535 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-kube-api-access-dp2tw" (OuterVolumeSpecName: "kube-api-access-dp2tw") pod "fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" (UID: "fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a"). InnerVolumeSpecName "kube-api-access-dp2tw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.831543 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" (UID: "fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.915326 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.915358 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dp2tw\" (UniqueName: \"kubernetes.io/projected/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-kube-api-access-dp2tw\") on node \"crc\" DevicePath \"\"" Sep 30 15:11:06 crc kubenswrapper[4783]: I0930 15:11:06.915371 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.228087 4783 generic.go:334] "Generic (PLEG): container finished" podID="fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" containerID="1b8ba837f1b0b60d47d75054b0f4d9dff4d89a4ce1c41630c17bb7aa85823245" exitCode=0 Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.228156 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mhk26" Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.228177 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhk26" event={"ID":"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a","Type":"ContainerDied","Data":"1b8ba837f1b0b60d47d75054b0f4d9dff4d89a4ce1c41630c17bb7aa85823245"} Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.228550 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhk26" event={"ID":"fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a","Type":"ContainerDied","Data":"778e565a13afe60b1fb7ef86d02ea91857c1de6f51b48381281dd12b5f1d6ca4"} Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.228574 4783 scope.go:117] "RemoveContainer" containerID="1b8ba837f1b0b60d47d75054b0f4d9dff4d89a4ce1c41630c17bb7aa85823245" Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.248263 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhk26"] Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.249076 4783 scope.go:117] "RemoveContainer" containerID="6c8ac09a63fe35318132941bc844031deffba87bd48aba4ccc3a12b339a625a6" Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.263113 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhk26"] Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.265811 4783 scope.go:117] "RemoveContainer" containerID="c7872bcc7a7d062ee0d48a383c27ae6a345b5cd7ae5ff1b4b04dc2e3dde3123e" Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.307201 4783 scope.go:117] "RemoveContainer" containerID="1b8ba837f1b0b60d47d75054b0f4d9dff4d89a4ce1c41630c17bb7aa85823245" Sep 30 15:11:07 crc kubenswrapper[4783]: E0930 15:11:07.307766 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b8ba837f1b0b60d47d75054b0f4d9dff4d89a4ce1c41630c17bb7aa85823245\": container with ID starting with 1b8ba837f1b0b60d47d75054b0f4d9dff4d89a4ce1c41630c17bb7aa85823245 not found: ID does not exist" containerID="1b8ba837f1b0b60d47d75054b0f4d9dff4d89a4ce1c41630c17bb7aa85823245" Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.307801 4783 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b8ba837f1b0b60d47d75054b0f4d9dff4d89a4ce1c41630c17bb7aa85823245"} err="failed to get container status \"1b8ba837f1b0b60d47d75054b0f4d9dff4d89a4ce1c41630c17bb7aa85823245\": rpc error: code = NotFound desc = could not find container \"1b8ba837f1b0b60d47d75054b0f4d9dff4d89a4ce1c41630c17bb7aa85823245\": container with ID starting with 1b8ba837f1b0b60d47d75054b0f4d9dff4d89a4ce1c41630c17bb7aa85823245 not found: ID does not exist" Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.307827 4783 scope.go:117] "RemoveContainer" containerID="6c8ac09a63fe35318132941bc844031deffba87bd48aba4ccc3a12b339a625a6" Sep 30 15:11:07 crc kubenswrapper[4783]: E0930 15:11:07.308146 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c8ac09a63fe35318132941bc844031deffba87bd48aba4ccc3a12b339a625a6\": container with ID starting with 6c8ac09a63fe35318132941bc844031deffba87bd48aba4ccc3a12b339a625a6 not found: ID does not exist" containerID="6c8ac09a63fe35318132941bc844031deffba87bd48aba4ccc3a12b339a625a6" Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.308173 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c8ac09a63fe35318132941bc844031deffba87bd48aba4ccc3a12b339a625a6"} err="failed to get container status \"6c8ac09a63fe35318132941bc844031deffba87bd48aba4ccc3a12b339a625a6\": rpc error: code = NotFound desc = could not find container \"6c8ac09a63fe35318132941bc844031deffba87bd48aba4ccc3a12b339a625a6\": container with ID starting with 6c8ac09a63fe35318132941bc844031deffba87bd48aba4ccc3a12b339a625a6 not found: ID does not exist" Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.308188 4783 scope.go:117] "RemoveContainer" containerID="c7872bcc7a7d062ee0d48a383c27ae6a345b5cd7ae5ff1b4b04dc2e3dde3123e" Sep 30 15:11:07 crc kubenswrapper[4783]: E0930 15:11:07.308480 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7872bcc7a7d062ee0d48a383c27ae6a345b5cd7ae5ff1b4b04dc2e3dde3123e\": container with ID starting with c7872bcc7a7d062ee0d48a383c27ae6a345b5cd7ae5ff1b4b04dc2e3dde3123e not found: ID does not exist" containerID="c7872bcc7a7d062ee0d48a383c27ae6a345b5cd7ae5ff1b4b04dc2e3dde3123e" Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.308519 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7872bcc7a7d062ee0d48a383c27ae6a345b5cd7ae5ff1b4b04dc2e3dde3123e"} err="failed to get container status \"c7872bcc7a7d062ee0d48a383c27ae6a345b5cd7ae5ff1b4b04dc2e3dde3123e\": rpc error: code = NotFound desc = could not find container \"c7872bcc7a7d062ee0d48a383c27ae6a345b5cd7ae5ff1b4b04dc2e3dde3123e\": container with ID starting with c7872bcc7a7d062ee0d48a383c27ae6a345b5cd7ae5ff1b4b04dc2e3dde3123e not found: ID does not exist" Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.673645 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 15:11:07 crc kubenswrapper[4783]: I0930 15:11:07.673707 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" 
podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 15:11:08 crc kubenswrapper[4783]: I0930 15:11:08.852778 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" path="/var/lib/kubelet/pods/fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a/volumes" Sep 30 15:11:19 crc kubenswrapper[4783]: I0930 15:11:19.602073 4783 scope.go:117] "RemoveContainer" containerID="ee508742a7e5e176ce2901b21d61ade7cf17e46e79695de0c5a9b0bceb5f2d9e" Sep 30 15:11:19 crc kubenswrapper[4783]: I0930 15:11:19.626194 4783 scope.go:117] "RemoveContainer" containerID="8c6e9350ff19d10a57d781a324f52cd8f0cd6df6fa77d3fca9fbbe02df0715c3" Sep 30 15:11:20 crc kubenswrapper[4783]: I0930 15:11:20.760186 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-5fpgz_01b72b4c-9858-4ddf-9436-557dbb523e7d/control-plane-machine-set-operator/0.log" Sep 30 15:11:20 crc kubenswrapper[4783]: I0930 15:11:20.935212 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-4lcz8_199c8590-e441-428e-99f7-baf1f24b3900/kube-rbac-proxy/0.log" Sep 30 15:11:20 crc kubenswrapper[4783]: I0930 15:11:20.974010 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-4lcz8_199c8590-e441-428e-99f7-baf1f24b3900/machine-api-operator/0.log" Sep 30 15:11:31 crc kubenswrapper[4783]: I0930 15:11:31.724576 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-7d4cc89fcb-w6ft5_8d49829c-32eb-424e-9d8a-ca14c7f9d7fc/cert-manager-controller/0.log" Sep 30 15:11:31 crc kubenswrapper[4783]: I0930 15:11:31.887549 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7d9f95dbf-tx4c4_72af3d05-5e53-4c83-a786-8574f0b34aa3/cert-manager-cainjector/0.log" Sep 30 15:11:31 crc kubenswrapper[4783]: I0930 15:11:31.890060 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-d969966f-dtrgf_1ed5cd4e-785b-4581-92a8-9fe49236902e/cert-manager-webhook/0.log" Sep 30 15:11:37 crc kubenswrapper[4783]: I0930 15:11:37.674281 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 15:11:37 crc kubenswrapper[4783]: I0930 15:11:37.674796 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 15:11:43 crc kubenswrapper[4783]: I0930 15:11:43.384053 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-m9kbd_87a6adf2-811c-4d06-9d91-9ec8608884ac/nmstate-console-plugin/0.log" Sep 30 15:11:43 crc kubenswrapper[4783]: I0930 15:11:43.613589 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-rtqrb_70a55481-e973-4943-9e2b-48b96623f8a8/nmstate-handler/0.log" Sep 30 
15:11:43 crc kubenswrapper[4783]: I0930 15:11:43.651765 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-k96qg_bca29d47-0e6d-4623-896c-e4ea4ddf1c14/kube-rbac-proxy/0.log" Sep 30 15:11:43 crc kubenswrapper[4783]: I0930 15:11:43.718512 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-k96qg_bca29d47-0e6d-4623-896c-e4ea4ddf1c14/nmstate-metrics/0.log" Sep 30 15:11:43 crc kubenswrapper[4783]: I0930 15:11:43.823860 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-2j82g_b1643e3b-aa5b-4ab1-8d01-0d16bb277b1c/nmstate-operator/0.log" Sep 30 15:11:43 crc kubenswrapper[4783]: I0930 15:11:43.939812 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-rvbkm_12776533-ac8c-4d72-b6d1-0a9e46184617/nmstate-webhook/0.log" Sep 30 15:11:57 crc kubenswrapper[4783]: I0930 15:11:57.388896 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-mxhr5_7faa07ce-5317-4817-a37d-66ec8ab6c2cd/kube-rbac-proxy/0.log" Sep 30 15:11:57 crc kubenswrapper[4783]: I0930 15:11:57.637051 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-zbpb2_f7685957-e95a-4b5e-af72-6cede8277b41/frr-k8s-webhook-server/0.log" Sep 30 15:11:57 crc kubenswrapper[4783]: I0930 15:11:57.845822 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/cp-frr-files/0.log" Sep 30 15:11:57 crc kubenswrapper[4783]: I0930 15:11:57.904296 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-mxhr5_7faa07ce-5317-4817-a37d-66ec8ab6c2cd/controller/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.008126 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/cp-reloader/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.013858 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/cp-frr-files/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.083437 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/cp-metrics/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.128485 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/cp-reloader/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.279580 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/cp-metrics/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.282499 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/cp-frr-files/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.291081 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/cp-reloader/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.337482 4783 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/cp-metrics/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.470400 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/cp-frr-files/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.482404 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/cp-reloader/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.488153 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/cp-metrics/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.522085 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/controller/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.684960 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/kube-rbac-proxy/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.716357 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/frr-metrics/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.767324 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/kube-rbac-proxy-frr/0.log" Sep 30 15:11:58 crc kubenswrapper[4783]: I0930 15:11:58.878936 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/reloader/0.log" Sep 30 15:11:59 crc kubenswrapper[4783]: I0930 15:11:59.038912 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6d6c5dbbcc-sqqgd_81fee3e2-b4bc-46a3-85dc-3a15c11a1620/manager/0.log" Sep 30 15:11:59 crc kubenswrapper[4783]: I0930 15:11:59.227388 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-c6c769cb9-27kpp_67f22223-c5d5-43af-9c5c-0791aec426e7/webhook-server/0.log" Sep 30 15:11:59 crc kubenswrapper[4783]: I0930 15:11:59.336012 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-sfsjm_f3657dc0-cffe-4309-ba77-ee5c025db0b5/kube-rbac-proxy/0.log" Sep 30 15:11:59 crc kubenswrapper[4783]: I0930 15:11:59.974967 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-sfsjm_f3657dc0-cffe-4309-ba77-ee5c025db0b5/speaker/0.log" Sep 30 15:12:00 crc kubenswrapper[4783]: I0930 15:12:00.437056 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-z6csc_26904722-3358-41d0-9485-379c77a69694/frr/0.log" Sep 30 15:12:07 crc kubenswrapper[4783]: I0930 15:12:07.673459 4783 patch_prober.go:28] interesting pod/machine-config-daemon-668zf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 15:12:07 crc kubenswrapper[4783]: I0930 15:12:07.674070 4783 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 15:12:07 crc kubenswrapper[4783]: I0930 15:12:07.674134 4783 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-668zf" Sep 30 15:12:07 crc kubenswrapper[4783]: I0930 15:12:07.675080 4783 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2"} pod="openshift-machine-config-operator/machine-config-daemon-668zf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 15:12:07 crc kubenswrapper[4783]: I0930 15:12:07.675200 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerName="machine-config-daemon" containerID="cri-o://04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" gracePeriod=600 Sep 30 15:12:07 crc kubenswrapper[4783]: E0930 15:12:07.811905 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:12:08 crc kubenswrapper[4783]: I0930 15:12:08.704068 4783 generic.go:334] "Generic (PLEG): container finished" podID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" exitCode=0 Sep 30 15:12:08 crc kubenswrapper[4783]: I0930 15:12:08.704070 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-668zf" event={"ID":"b379cdd3-61d0-47bd-8d9c-4f7809bb75cb","Type":"ContainerDied","Data":"04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2"} Sep 30 15:12:08 crc kubenswrapper[4783]: I0930 15:12:08.704158 4783 scope.go:117] "RemoveContainer" containerID="02fca664b73840f45075e91eaba0fd3e357ad5132d18118ff03f8a09e061e7d8" Sep 30 15:12:08 crc kubenswrapper[4783]: I0930 15:12:08.704896 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:12:08 crc kubenswrapper[4783]: E0930 15:12:08.705313 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:12:11 crc kubenswrapper[4783]: I0930 15:12:11.192888 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59_4398f894-90ea-458d-8719-40757c59780c/util/0.log" Sep 30 15:12:11 crc kubenswrapper[4783]: I0930 15:12:11.375639 4783 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59_4398f894-90ea-458d-8719-40757c59780c/util/0.log" Sep 30 15:12:11 crc kubenswrapper[4783]: I0930 15:12:11.450898 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59_4398f894-90ea-458d-8719-40757c59780c/pull/0.log" Sep 30 15:12:11 crc kubenswrapper[4783]: I0930 15:12:11.451546 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59_4398f894-90ea-458d-8719-40757c59780c/pull/0.log" Sep 30 15:12:11 crc kubenswrapper[4783]: I0930 15:12:11.568298 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59_4398f894-90ea-458d-8719-40757c59780c/util/0.log" Sep 30 15:12:11 crc kubenswrapper[4783]: I0930 15:12:11.592868 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59_4398f894-90ea-458d-8719-40757c59780c/pull/0.log" Sep 30 15:12:11 crc kubenswrapper[4783]: I0930 15:12:11.674496 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69qjb59_4398f894-90ea-458d-8719-40757c59780c/extract/0.log" Sep 30 15:12:11 crc kubenswrapper[4783]: I0930 15:12:11.767927 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq_d73fa222-1e59-42b8-bb54-9a47ead75914/util/0.log" Sep 30 15:12:11 crc kubenswrapper[4783]: I0930 15:12:11.924832 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq_d73fa222-1e59-42b8-bb54-9a47ead75914/pull/0.log" Sep 30 15:12:11 crc kubenswrapper[4783]: I0930 15:12:11.928932 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq_d73fa222-1e59-42b8-bb54-9a47ead75914/pull/0.log" Sep 30 15:12:11 crc kubenswrapper[4783]: I0930 15:12:11.964293 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq_d73fa222-1e59-42b8-bb54-9a47ead75914/util/0.log" Sep 30 15:12:12 crc kubenswrapper[4783]: I0930 15:12:12.129923 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq_d73fa222-1e59-42b8-bb54-9a47ead75914/extract/0.log" Sep 30 15:12:12 crc kubenswrapper[4783]: I0930 15:12:12.132993 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq_d73fa222-1e59-42b8-bb54-9a47ead75914/util/0.log" Sep 30 15:12:12 crc kubenswrapper[4783]: I0930 15:12:12.153141 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bct4vkq_d73fa222-1e59-42b8-bb54-9a47ead75914/pull/0.log" Sep 30 15:12:12 crc kubenswrapper[4783]: I0930 15:12:12.324204 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m4cxc_cfaadd1a-d3fd-4153-8a32-6684619bcb8c/extract-utilities/0.log" Sep 30 
15:12:12 crc kubenswrapper[4783]: I0930 15:12:12.472426 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m4cxc_cfaadd1a-d3fd-4153-8a32-6684619bcb8c/extract-utilities/0.log" Sep 30 15:12:12 crc kubenswrapper[4783]: I0930 15:12:12.485312 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m4cxc_cfaadd1a-d3fd-4153-8a32-6684619bcb8c/extract-content/0.log" Sep 30 15:12:12 crc kubenswrapper[4783]: I0930 15:12:12.502639 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m4cxc_cfaadd1a-d3fd-4153-8a32-6684619bcb8c/extract-content/0.log" Sep 30 15:12:12 crc kubenswrapper[4783]: I0930 15:12:12.684068 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m4cxc_cfaadd1a-d3fd-4153-8a32-6684619bcb8c/extract-utilities/0.log" Sep 30 15:12:12 crc kubenswrapper[4783]: I0930 15:12:12.712474 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m4cxc_cfaadd1a-d3fd-4153-8a32-6684619bcb8c/extract-content/0.log" Sep 30 15:12:12 crc kubenswrapper[4783]: I0930 15:12:12.928378 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-snhkr_3825052e-49a3-48c6-aff5-7b30d755ff3f/extract-utilities/0.log" Sep 30 15:12:13 crc kubenswrapper[4783]: I0930 15:12:13.160720 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-snhkr_3825052e-49a3-48c6-aff5-7b30d755ff3f/extract-utilities/0.log" Sep 30 15:12:13 crc kubenswrapper[4783]: I0930 15:12:13.171211 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-snhkr_3825052e-49a3-48c6-aff5-7b30d755ff3f/extract-content/0.log" Sep 30 15:12:13 crc kubenswrapper[4783]: I0930 15:12:13.191678 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-snhkr_3825052e-49a3-48c6-aff5-7b30d755ff3f/extract-content/0.log" Sep 30 15:12:13 crc kubenswrapper[4783]: I0930 15:12:13.427998 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-snhkr_3825052e-49a3-48c6-aff5-7b30d755ff3f/extract-utilities/0.log" Sep 30 15:12:13 crc kubenswrapper[4783]: I0930 15:12:13.430656 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-snhkr_3825052e-49a3-48c6-aff5-7b30d755ff3f/extract-content/0.log" Sep 30 15:12:13 crc kubenswrapper[4783]: I0930 15:12:13.654701 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp_6619aa41-665c-4e5e-bbbd-be79c4c2db9f/util/0.log" Sep 30 15:12:13 crc kubenswrapper[4783]: I0930 15:12:13.790607 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-snhkr_3825052e-49a3-48c6-aff5-7b30d755ff3f/registry-server/0.log" Sep 30 15:12:13 crc kubenswrapper[4783]: I0930 15:12:13.826955 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m4cxc_cfaadd1a-d3fd-4153-8a32-6684619bcb8c/registry-server/0.log" Sep 30 15:12:13 crc kubenswrapper[4783]: I0930 15:12:13.891437 4783 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp_6619aa41-665c-4e5e-bbbd-be79c4c2db9f/util/0.log" Sep 30 15:12:13 crc kubenswrapper[4783]: I0930 15:12:13.923188 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp_6619aa41-665c-4e5e-bbbd-be79c4c2db9f/pull/0.log" Sep 30 15:12:13 crc kubenswrapper[4783]: I0930 15:12:13.955257 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp_6619aa41-665c-4e5e-bbbd-be79c4c2db9f/pull/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.131247 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp_6619aa41-665c-4e5e-bbbd-be79c4c2db9f/pull/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.131708 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp_6619aa41-665c-4e5e-bbbd-be79c4c2db9f/util/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.166522 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96b85dp_6619aa41-665c-4e5e-bbbd-be79c4c2db9f/extract/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.287604 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-lrx8q_ac751948-b749-45af-8006-6dfe52d63607/marketplace-operator/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.347995 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-74bz9_f337bfc3-dbc0-44a9-94ac-26e55012e353/extract-utilities/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.487109 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-74bz9_f337bfc3-dbc0-44a9-94ac-26e55012e353/extract-content/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.501056 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-74bz9_f337bfc3-dbc0-44a9-94ac-26e55012e353/extract-utilities/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.517282 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-74bz9_f337bfc3-dbc0-44a9-94ac-26e55012e353/extract-content/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.655595 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-74bz9_f337bfc3-dbc0-44a9-94ac-26e55012e353/extract-utilities/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.667206 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-74bz9_f337bfc3-dbc0-44a9-94ac-26e55012e353/extract-content/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.758441 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9jkbq_39e74efa-c715-4736-8ed9-98a8ae696f8f/extract-utilities/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.903811 4783 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-74bz9_f337bfc3-dbc0-44a9-94ac-26e55012e353/registry-server/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.967627 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9jkbq_39e74efa-c715-4736-8ed9-98a8ae696f8f/extract-content/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.983596 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9jkbq_39e74efa-c715-4736-8ed9-98a8ae696f8f/extract-utilities/0.log" Sep 30 15:12:14 crc kubenswrapper[4783]: I0930 15:12:14.985557 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9jkbq_39e74efa-c715-4736-8ed9-98a8ae696f8f/extract-content/0.log" Sep 30 15:12:15 crc kubenswrapper[4783]: I0930 15:12:15.108891 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9jkbq_39e74efa-c715-4736-8ed9-98a8ae696f8f/extract-content/0.log" Sep 30 15:12:15 crc kubenswrapper[4783]: I0930 15:12:15.112771 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9jkbq_39e74efa-c715-4736-8ed9-98a8ae696f8f/extract-utilities/0.log" Sep 30 15:12:15 crc kubenswrapper[4783]: I0930 15:12:15.553327 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9jkbq_39e74efa-c715-4736-8ed9-98a8ae696f8f/registry-server/0.log" Sep 30 15:12:19 crc kubenswrapper[4783]: I0930 15:12:19.766495 4783 scope.go:117] "RemoveContainer" containerID="ecbca7a37f0349ac6e3754e055ac7dc36a19f55dcd94c4e11836a1f391013a4c" Sep 30 15:12:19 crc kubenswrapper[4783]: I0930 15:12:19.842735 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:12:19 crc kubenswrapper[4783]: E0930 15:12:19.843154 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:12:30 crc kubenswrapper[4783]: I0930 15:12:30.848952 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:12:30 crc kubenswrapper[4783]: E0930 15:12:30.849818 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:12:41 crc kubenswrapper[4783]: I0930 15:12:41.843708 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:12:41 crc kubenswrapper[4783]: E0930 15:12:41.844718 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:12:52 crc kubenswrapper[4783]: I0930 15:12:52.843704 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:12:52 crc kubenswrapper[4783]: E0930 15:12:52.844440 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:13:04 crc kubenswrapper[4783]: I0930 15:13:04.844145 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:13:04 crc kubenswrapper[4783]: E0930 15:13:04.845308 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:13:19 crc kubenswrapper[4783]: I0930 15:13:19.843725 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:13:19 crc kubenswrapper[4783]: E0930 15:13:19.844808 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:13:31 crc kubenswrapper[4783]: I0930 15:13:31.842825 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:13:31 crc kubenswrapper[4783]: E0930 15:13:31.843644 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:13:45 crc kubenswrapper[4783]: I0930 15:13:45.843272 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:13:45 crc kubenswrapper[4783]: E0930 15:13:45.844449 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" 
podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:13:53 crc kubenswrapper[4783]: I0930 15:13:53.672300 4783 generic.go:334] "Generic (PLEG): container finished" podID="028a96da-a13d-45eb-beb2-fcf894166faf" containerID="d1704fa6fe1be1285afba426699dbfe394116eae4ec8238be671f2ffc7502461" exitCode=0 Sep 30 15:13:53 crc kubenswrapper[4783]: I0930 15:13:53.672382 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-7dfk5/must-gather-w7qnh" event={"ID":"028a96da-a13d-45eb-beb2-fcf894166faf","Type":"ContainerDied","Data":"d1704fa6fe1be1285afba426699dbfe394116eae4ec8238be671f2ffc7502461"} Sep 30 15:13:53 crc kubenswrapper[4783]: I0930 15:13:53.673483 4783 scope.go:117] "RemoveContainer" containerID="d1704fa6fe1be1285afba426699dbfe394116eae4ec8238be671f2ffc7502461" Sep 30 15:13:54 crc kubenswrapper[4783]: I0930 15:13:54.331687 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-7dfk5_must-gather-w7qnh_028a96da-a13d-45eb-beb2-fcf894166faf/gather/0.log" Sep 30 15:13:57 crc kubenswrapper[4783]: I0930 15:13:57.844104 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:13:57 crc kubenswrapper[4783]: E0930 15:13:57.846527 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:14:01 crc kubenswrapper[4783]: I0930 15:14:01.972293 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-7dfk5/must-gather-w7qnh"] Sep 30 15:14:01 crc kubenswrapper[4783]: I0930 15:14:01.973051 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-7dfk5/must-gather-w7qnh" podUID="028a96da-a13d-45eb-beb2-fcf894166faf" containerName="copy" containerID="cri-o://6bca4eddf7e2f303955cdcff83f61aa5fb6f8ea75de89d374217de54d3d280fc" gracePeriod=2 Sep 30 15:14:01 crc kubenswrapper[4783]: I0930 15:14:01.983387 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-7dfk5/must-gather-w7qnh"] Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.386827 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-7dfk5_must-gather-w7qnh_028a96da-a13d-45eb-beb2-fcf894166faf/copy/0.log" Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.388019 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-7dfk5/must-gather-w7qnh" Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.490213 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/028a96da-a13d-45eb-beb2-fcf894166faf-must-gather-output\") pod \"028a96da-a13d-45eb-beb2-fcf894166faf\" (UID: \"028a96da-a13d-45eb-beb2-fcf894166faf\") " Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.490307 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvxj7\" (UniqueName: \"kubernetes.io/projected/028a96da-a13d-45eb-beb2-fcf894166faf-kube-api-access-mvxj7\") pod \"028a96da-a13d-45eb-beb2-fcf894166faf\" (UID: \"028a96da-a13d-45eb-beb2-fcf894166faf\") " Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.499029 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/028a96da-a13d-45eb-beb2-fcf894166faf-kube-api-access-mvxj7" (OuterVolumeSpecName: "kube-api-access-mvxj7") pod "028a96da-a13d-45eb-beb2-fcf894166faf" (UID: "028a96da-a13d-45eb-beb2-fcf894166faf"). InnerVolumeSpecName "kube-api-access-mvxj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.592488 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvxj7\" (UniqueName: \"kubernetes.io/projected/028a96da-a13d-45eb-beb2-fcf894166faf-kube-api-access-mvxj7\") on node \"crc\" DevicePath \"\"" Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.646297 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/028a96da-a13d-45eb-beb2-fcf894166faf-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "028a96da-a13d-45eb-beb2-fcf894166faf" (UID: "028a96da-a13d-45eb-beb2-fcf894166faf"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.695404 4783 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/028a96da-a13d-45eb-beb2-fcf894166faf-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.747971 4783 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-7dfk5_must-gather-w7qnh_028a96da-a13d-45eb-beb2-fcf894166faf/copy/0.log" Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.748209 4783 generic.go:334] "Generic (PLEG): container finished" podID="028a96da-a13d-45eb-beb2-fcf894166faf" containerID="6bca4eddf7e2f303955cdcff83f61aa5fb6f8ea75de89d374217de54d3d280fc" exitCode=143 Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.748268 4783 scope.go:117] "RemoveContainer" containerID="6bca4eddf7e2f303955cdcff83f61aa5fb6f8ea75de89d374217de54d3d280fc" Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.748376 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-7dfk5/must-gather-w7qnh" Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.785742 4783 scope.go:117] "RemoveContainer" containerID="d1704fa6fe1be1285afba426699dbfe394116eae4ec8238be671f2ffc7502461" Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.856698 4783 scope.go:117] "RemoveContainer" containerID="6bca4eddf7e2f303955cdcff83f61aa5fb6f8ea75de89d374217de54d3d280fc" Sep 30 15:14:02 crc kubenswrapper[4783]: E0930 15:14:02.857275 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bca4eddf7e2f303955cdcff83f61aa5fb6f8ea75de89d374217de54d3d280fc\": container with ID starting with 6bca4eddf7e2f303955cdcff83f61aa5fb6f8ea75de89d374217de54d3d280fc not found: ID does not exist" containerID="6bca4eddf7e2f303955cdcff83f61aa5fb6f8ea75de89d374217de54d3d280fc" Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.857324 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bca4eddf7e2f303955cdcff83f61aa5fb6f8ea75de89d374217de54d3d280fc"} err="failed to get container status \"6bca4eddf7e2f303955cdcff83f61aa5fb6f8ea75de89d374217de54d3d280fc\": rpc error: code = NotFound desc = could not find container \"6bca4eddf7e2f303955cdcff83f61aa5fb6f8ea75de89d374217de54d3d280fc\": container with ID starting with 6bca4eddf7e2f303955cdcff83f61aa5fb6f8ea75de89d374217de54d3d280fc not found: ID does not exist" Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.857358 4783 scope.go:117] "RemoveContainer" containerID="d1704fa6fe1be1285afba426699dbfe394116eae4ec8238be671f2ffc7502461" Sep 30 15:14:02 crc kubenswrapper[4783]: E0930 15:14:02.859596 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1704fa6fe1be1285afba426699dbfe394116eae4ec8238be671f2ffc7502461\": container with ID starting with d1704fa6fe1be1285afba426699dbfe394116eae4ec8238be671f2ffc7502461 not found: ID does not exist" containerID="d1704fa6fe1be1285afba426699dbfe394116eae4ec8238be671f2ffc7502461" Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.859631 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1704fa6fe1be1285afba426699dbfe394116eae4ec8238be671f2ffc7502461"} err="failed to get container status \"d1704fa6fe1be1285afba426699dbfe394116eae4ec8238be671f2ffc7502461\": rpc error: code = NotFound desc = could not find container \"d1704fa6fe1be1285afba426699dbfe394116eae4ec8238be671f2ffc7502461\": container with ID starting with d1704fa6fe1be1285afba426699dbfe394116eae4ec8238be671f2ffc7502461 not found: ID does not exist" Sep 30 15:14:02 crc kubenswrapper[4783]: I0930 15:14:02.860373 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="028a96da-a13d-45eb-beb2-fcf894166faf" path="/var/lib/kubelet/pods/028a96da-a13d-45eb-beb2-fcf894166faf/volumes" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.862085 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m7kb9"] Sep 30 15:14:06 crc kubenswrapper[4783]: E0930 15:14:06.863430 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" containerName="extract-utilities" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.863489 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" containerName="extract-utilities" Sep 30 
15:14:06 crc kubenswrapper[4783]: E0930 15:14:06.863508 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="028a96da-a13d-45eb-beb2-fcf894166faf" containerName="gather" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.863519 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="028a96da-a13d-45eb-beb2-fcf894166faf" containerName="gather" Sep 30 15:14:06 crc kubenswrapper[4783]: E0930 15:14:06.863592 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9680f23-d0df-48e7-81da-96239176c736" containerName="container-00" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.863604 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9680f23-d0df-48e7-81da-96239176c736" containerName="container-00" Sep 30 15:14:06 crc kubenswrapper[4783]: E0930 15:14:06.863621 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="028a96da-a13d-45eb-beb2-fcf894166faf" containerName="copy" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.863663 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="028a96da-a13d-45eb-beb2-fcf894166faf" containerName="copy" Sep 30 15:14:06 crc kubenswrapper[4783]: E0930 15:14:06.863680 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" containerName="registry-server" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.863687 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" containerName="registry-server" Sep 30 15:14:06 crc kubenswrapper[4783]: E0930 15:14:06.863711 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" containerName="extract-content" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.863751 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" containerName="extract-content" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.864083 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="028a96da-a13d-45eb-beb2-fcf894166faf" containerName="gather" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.864114 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe8f7abc-d1b4-4ddc-9172-0d6be83ee84a" containerName="registry-server" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.864165 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="028a96da-a13d-45eb-beb2-fcf894166faf" containerName="copy" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.864180 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9680f23-d0df-48e7-81da-96239176c736" containerName="container-00" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.865697 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.879670 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m7kb9"] Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.963233 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5147c86e-5a96-4b20-bdc0-42996d2b63f1-catalog-content\") pod \"redhat-operators-m7kb9\" (UID: \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\") " pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.963300 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5147c86e-5a96-4b20-bdc0-42996d2b63f1-utilities\") pod \"redhat-operators-m7kb9\" (UID: \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\") " pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:06 crc kubenswrapper[4783]: I0930 15:14:06.963490 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnqb4\" (UniqueName: \"kubernetes.io/projected/5147c86e-5a96-4b20-bdc0-42996d2b63f1-kube-api-access-gnqb4\") pod \"redhat-operators-m7kb9\" (UID: \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\") " pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:07 crc kubenswrapper[4783]: I0930 15:14:07.065042 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5147c86e-5a96-4b20-bdc0-42996d2b63f1-utilities\") pod \"redhat-operators-m7kb9\" (UID: \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\") " pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:07 crc kubenswrapper[4783]: I0930 15:14:07.065122 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnqb4\" (UniqueName: \"kubernetes.io/projected/5147c86e-5a96-4b20-bdc0-42996d2b63f1-kube-api-access-gnqb4\") pod \"redhat-operators-m7kb9\" (UID: \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\") " pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:07 crc kubenswrapper[4783]: I0930 15:14:07.065211 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5147c86e-5a96-4b20-bdc0-42996d2b63f1-catalog-content\") pod \"redhat-operators-m7kb9\" (UID: \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\") " pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:07 crc kubenswrapper[4783]: I0930 15:14:07.065719 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5147c86e-5a96-4b20-bdc0-42996d2b63f1-utilities\") pod \"redhat-operators-m7kb9\" (UID: \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\") " pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:07 crc kubenswrapper[4783]: I0930 15:14:07.065732 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5147c86e-5a96-4b20-bdc0-42996d2b63f1-catalog-content\") pod \"redhat-operators-m7kb9\" (UID: \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\") " pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:07 crc kubenswrapper[4783]: I0930 15:14:07.094993 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gnqb4\" (UniqueName: \"kubernetes.io/projected/5147c86e-5a96-4b20-bdc0-42996d2b63f1-kube-api-access-gnqb4\") pod \"redhat-operators-m7kb9\" (UID: \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\") " pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:07 crc kubenswrapper[4783]: I0930 15:14:07.203236 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:07 crc kubenswrapper[4783]: I0930 15:14:07.641918 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m7kb9"] Sep 30 15:14:07 crc kubenswrapper[4783]: I0930 15:14:07.804809 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7kb9" event={"ID":"5147c86e-5a96-4b20-bdc0-42996d2b63f1","Type":"ContainerStarted","Data":"cb729c01a38a873a43facfc15073ff439344cc94034c36b77d32af8470fd96b2"} Sep 30 15:14:08 crc kubenswrapper[4783]: I0930 15:14:08.819646 4783 generic.go:334] "Generic (PLEG): container finished" podID="5147c86e-5a96-4b20-bdc0-42996d2b63f1" containerID="1cf2c4b58b66585fc5f64bd189b26a0cead761932c26fb4725ac1c4a8764a9c2" exitCode=0 Sep 30 15:14:08 crc kubenswrapper[4783]: I0930 15:14:08.819697 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7kb9" event={"ID":"5147c86e-5a96-4b20-bdc0-42996d2b63f1","Type":"ContainerDied","Data":"1cf2c4b58b66585fc5f64bd189b26a0cead761932c26fb4725ac1c4a8764a9c2"} Sep 30 15:14:09 crc kubenswrapper[4783]: I0930 15:14:09.828405 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7kb9" event={"ID":"5147c86e-5a96-4b20-bdc0-42996d2b63f1","Type":"ContainerStarted","Data":"df989f62512ff6082ba6629797ec506ea768462914475f5d687a84c1eeeb11fc"} Sep 30 15:14:10 crc kubenswrapper[4783]: I0930 15:14:10.838186 4783 generic.go:334] "Generic (PLEG): container finished" podID="5147c86e-5a96-4b20-bdc0-42996d2b63f1" containerID="df989f62512ff6082ba6629797ec506ea768462914475f5d687a84c1eeeb11fc" exitCode=0 Sep 30 15:14:10 crc kubenswrapper[4783]: I0930 15:14:10.838284 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7kb9" event={"ID":"5147c86e-5a96-4b20-bdc0-42996d2b63f1","Type":"ContainerDied","Data":"df989f62512ff6082ba6629797ec506ea768462914475f5d687a84c1eeeb11fc"} Sep 30 15:14:11 crc kubenswrapper[4783]: I0930 15:14:11.851731 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7kb9" event={"ID":"5147c86e-5a96-4b20-bdc0-42996d2b63f1","Type":"ContainerStarted","Data":"df4775a34f932c72f1dd0ede095654039fc40cefdb08043c1d78938f6e311b1a"} Sep 30 15:14:11 crc kubenswrapper[4783]: I0930 15:14:11.876415 4783 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m7kb9" podStartSLOduration=3.414977655 podStartE2EDuration="5.87638243s" podCreationTimestamp="2025-09-30 15:14:06 +0000 UTC" firstStartedPulling="2025-09-30 15:14:08.82114884 +0000 UTC m=+5948.752615157" lastFinishedPulling="2025-09-30 15:14:11.282553625 +0000 UTC m=+5951.214019932" observedRunningTime="2025-09-30 15:14:11.872822587 +0000 UTC m=+5951.804288894" watchObservedRunningTime="2025-09-30 15:14:11.87638243 +0000 UTC m=+5951.807848817" Sep 30 15:14:12 crc kubenswrapper[4783]: I0930 15:14:12.842933 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:14:12 
crc kubenswrapper[4783]: E0930 15:14:12.843412 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:14:17 crc kubenswrapper[4783]: I0930 15:14:17.203950 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:17 crc kubenswrapper[4783]: I0930 15:14:17.204821 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:17 crc kubenswrapper[4783]: I0930 15:14:17.289144 4783 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:17 crc kubenswrapper[4783]: I0930 15:14:17.978625 4783 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:18 crc kubenswrapper[4783]: I0930 15:14:18.026291 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m7kb9"] Sep 30 15:14:19 crc kubenswrapper[4783]: I0930 15:14:19.920040 4783 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m7kb9" podUID="5147c86e-5a96-4b20-bdc0-42996d2b63f1" containerName="registry-server" containerID="cri-o://df4775a34f932c72f1dd0ede095654039fc40cefdb08043c1d78938f6e311b1a" gracePeriod=2 Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.417587 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.547777 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5147c86e-5a96-4b20-bdc0-42996d2b63f1-catalog-content\") pod \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\" (UID: \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\") " Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.547913 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnqb4\" (UniqueName: \"kubernetes.io/projected/5147c86e-5a96-4b20-bdc0-42996d2b63f1-kube-api-access-gnqb4\") pod \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\" (UID: \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\") " Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.548044 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5147c86e-5a96-4b20-bdc0-42996d2b63f1-utilities\") pod \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\" (UID: \"5147c86e-5a96-4b20-bdc0-42996d2b63f1\") " Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.549026 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5147c86e-5a96-4b20-bdc0-42996d2b63f1-utilities" (OuterVolumeSpecName: "utilities") pod "5147c86e-5a96-4b20-bdc0-42996d2b63f1" (UID: "5147c86e-5a96-4b20-bdc0-42996d2b63f1"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.554280 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5147c86e-5a96-4b20-bdc0-42996d2b63f1-kube-api-access-gnqb4" (OuterVolumeSpecName: "kube-api-access-gnqb4") pod "5147c86e-5a96-4b20-bdc0-42996d2b63f1" (UID: "5147c86e-5a96-4b20-bdc0-42996d2b63f1"). InnerVolumeSpecName "kube-api-access-gnqb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.640765 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5147c86e-5a96-4b20-bdc0-42996d2b63f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5147c86e-5a96-4b20-bdc0-42996d2b63f1" (UID: "5147c86e-5a96-4b20-bdc0-42996d2b63f1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.650702 4783 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5147c86e-5a96-4b20-bdc0-42996d2b63f1-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.650737 4783 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5147c86e-5a96-4b20-bdc0-42996d2b63f1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.650749 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnqb4\" (UniqueName: \"kubernetes.io/projected/5147c86e-5a96-4b20-bdc0-42996d2b63f1-kube-api-access-gnqb4\") on node \"crc\" DevicePath \"\"" Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.941373 4783 generic.go:334] "Generic (PLEG): container finished" podID="5147c86e-5a96-4b20-bdc0-42996d2b63f1" containerID="df4775a34f932c72f1dd0ede095654039fc40cefdb08043c1d78938f6e311b1a" exitCode=0 Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.941510 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m7kb9" Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.941524 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7kb9" event={"ID":"5147c86e-5a96-4b20-bdc0-42996d2b63f1","Type":"ContainerDied","Data":"df4775a34f932c72f1dd0ede095654039fc40cefdb08043c1d78938f6e311b1a"} Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.942485 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7kb9" event={"ID":"5147c86e-5a96-4b20-bdc0-42996d2b63f1","Type":"ContainerDied","Data":"cb729c01a38a873a43facfc15073ff439344cc94034c36b77d32af8470fd96b2"} Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.942560 4783 scope.go:117] "RemoveContainer" containerID="df4775a34f932c72f1dd0ede095654039fc40cefdb08043c1d78938f6e311b1a" Sep 30 15:14:21 crc kubenswrapper[4783]: I0930 15:14:21.975402 4783 scope.go:117] "RemoveContainer" containerID="df989f62512ff6082ba6629797ec506ea768462914475f5d687a84c1eeeb11fc" Sep 30 15:14:22 crc kubenswrapper[4783]: I0930 15:14:22.000062 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m7kb9"] Sep 30 15:14:22 crc kubenswrapper[4783]: I0930 15:14:22.006365 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m7kb9"] Sep 30 15:14:22 crc kubenswrapper[4783]: I0930 15:14:22.014506 4783 scope.go:117] "RemoveContainer" containerID="1cf2c4b58b66585fc5f64bd189b26a0cead761932c26fb4725ac1c4a8764a9c2" Sep 30 15:14:22 crc kubenswrapper[4783]: I0930 15:14:22.046314 4783 scope.go:117] "RemoveContainer" containerID="df4775a34f932c72f1dd0ede095654039fc40cefdb08043c1d78938f6e311b1a" Sep 30 15:14:22 crc kubenswrapper[4783]: E0930 15:14:22.046859 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df4775a34f932c72f1dd0ede095654039fc40cefdb08043c1d78938f6e311b1a\": container with ID starting with df4775a34f932c72f1dd0ede095654039fc40cefdb08043c1d78938f6e311b1a not found: ID does not exist" containerID="df4775a34f932c72f1dd0ede095654039fc40cefdb08043c1d78938f6e311b1a" Sep 30 15:14:22 crc kubenswrapper[4783]: I0930 15:14:22.046902 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df4775a34f932c72f1dd0ede095654039fc40cefdb08043c1d78938f6e311b1a"} err="failed to get container status \"df4775a34f932c72f1dd0ede095654039fc40cefdb08043c1d78938f6e311b1a\": rpc error: code = NotFound desc = could not find container \"df4775a34f932c72f1dd0ede095654039fc40cefdb08043c1d78938f6e311b1a\": container with ID starting with df4775a34f932c72f1dd0ede095654039fc40cefdb08043c1d78938f6e311b1a not found: ID does not exist" Sep 30 15:14:22 crc kubenswrapper[4783]: I0930 15:14:22.046935 4783 scope.go:117] "RemoveContainer" containerID="df989f62512ff6082ba6629797ec506ea768462914475f5d687a84c1eeeb11fc" Sep 30 15:14:22 crc kubenswrapper[4783]: E0930 15:14:22.047367 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df989f62512ff6082ba6629797ec506ea768462914475f5d687a84c1eeeb11fc\": container with ID starting with df989f62512ff6082ba6629797ec506ea768462914475f5d687a84c1eeeb11fc not found: ID does not exist" containerID="df989f62512ff6082ba6629797ec506ea768462914475f5d687a84c1eeeb11fc" Sep 30 15:14:22 crc kubenswrapper[4783]: I0930 15:14:22.047448 4783 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df989f62512ff6082ba6629797ec506ea768462914475f5d687a84c1eeeb11fc"} err="failed to get container status \"df989f62512ff6082ba6629797ec506ea768462914475f5d687a84c1eeeb11fc\": rpc error: code = NotFound desc = could not find container \"df989f62512ff6082ba6629797ec506ea768462914475f5d687a84c1eeeb11fc\": container with ID starting with df989f62512ff6082ba6629797ec506ea768462914475f5d687a84c1eeeb11fc not found: ID does not exist" Sep 30 15:14:22 crc kubenswrapper[4783]: I0930 15:14:22.047484 4783 scope.go:117] "RemoveContainer" containerID="1cf2c4b58b66585fc5f64bd189b26a0cead761932c26fb4725ac1c4a8764a9c2" Sep 30 15:14:22 crc kubenswrapper[4783]: E0930 15:14:22.048188 4783 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cf2c4b58b66585fc5f64bd189b26a0cead761932c26fb4725ac1c4a8764a9c2\": container with ID starting with 1cf2c4b58b66585fc5f64bd189b26a0cead761932c26fb4725ac1c4a8764a9c2 not found: ID does not exist" containerID="1cf2c4b58b66585fc5f64bd189b26a0cead761932c26fb4725ac1c4a8764a9c2" Sep 30 15:14:22 crc kubenswrapper[4783]: I0930 15:14:22.048233 4783 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cf2c4b58b66585fc5f64bd189b26a0cead761932c26fb4725ac1c4a8764a9c2"} err="failed to get container status \"1cf2c4b58b66585fc5f64bd189b26a0cead761932c26fb4725ac1c4a8764a9c2\": rpc error: code = NotFound desc = could not find container \"1cf2c4b58b66585fc5f64bd189b26a0cead761932c26fb4725ac1c4a8764a9c2\": container with ID starting with 1cf2c4b58b66585fc5f64bd189b26a0cead761932c26fb4725ac1c4a8764a9c2 not found: ID does not exist" Sep 30 15:14:22 crc kubenswrapper[4783]: I0930 15:14:22.852278 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5147c86e-5a96-4b20-bdc0-42996d2b63f1" path="/var/lib/kubelet/pods/5147c86e-5a96-4b20-bdc0-42996d2b63f1/volumes" Sep 30 15:14:24 crc kubenswrapper[4783]: I0930 15:14:24.843374 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:14:24 crc kubenswrapper[4783]: E0930 15:14:24.843820 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:14:37 crc kubenswrapper[4783]: I0930 15:14:37.843364 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:14:37 crc kubenswrapper[4783]: E0930 15:14:37.844290 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:14:52 crc kubenswrapper[4783]: I0930 15:14:52.843127 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:14:52 crc 
kubenswrapper[4783]: E0930 15:14:52.843705 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.169449 4783 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh"] Sep 30 15:15:00 crc kubenswrapper[4783]: E0930 15:15:00.171177 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5147c86e-5a96-4b20-bdc0-42996d2b63f1" containerName="extract-utilities" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.171206 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5147c86e-5a96-4b20-bdc0-42996d2b63f1" containerName="extract-utilities" Sep 30 15:15:00 crc kubenswrapper[4783]: E0930 15:15:00.171229 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5147c86e-5a96-4b20-bdc0-42996d2b63f1" containerName="extract-content" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.171238 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5147c86e-5a96-4b20-bdc0-42996d2b63f1" containerName="extract-content" Sep 30 15:15:00 crc kubenswrapper[4783]: E0930 15:15:00.171292 4783 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5147c86e-5a96-4b20-bdc0-42996d2b63f1" containerName="registry-server" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.171302 4783 state_mem.go:107] "Deleted CPUSet assignment" podUID="5147c86e-5a96-4b20-bdc0-42996d2b63f1" containerName="registry-server" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.171532 4783 memory_manager.go:354] "RemoveStaleState removing state" podUID="5147c86e-5a96-4b20-bdc0-42996d2b63f1" containerName="registry-server" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.172553 4783 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.175149 4783 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.175404 4783 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.181233 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh"] Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.211585 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47df3702-129e-4e11-9ef9-9c60d0ff30d2-secret-volume\") pod \"collect-profiles-29320755-x7nhh\" (UID: \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.211657 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47df3702-129e-4e11-9ef9-9c60d0ff30d2-config-volume\") pod \"collect-profiles-29320755-x7nhh\" (UID: \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.211712 4783 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65tmw\" (UniqueName: \"kubernetes.io/projected/47df3702-129e-4e11-9ef9-9c60d0ff30d2-kube-api-access-65tmw\") pod \"collect-profiles-29320755-x7nhh\" (UID: \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.313091 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47df3702-129e-4e11-9ef9-9c60d0ff30d2-secret-volume\") pod \"collect-profiles-29320755-x7nhh\" (UID: \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.313140 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47df3702-129e-4e11-9ef9-9c60d0ff30d2-config-volume\") pod \"collect-profiles-29320755-x7nhh\" (UID: \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.313181 4783 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65tmw\" (UniqueName: \"kubernetes.io/projected/47df3702-129e-4e11-9ef9-9c60d0ff30d2-kube-api-access-65tmw\") pod \"collect-profiles-29320755-x7nhh\" (UID: \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.314717 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47df3702-129e-4e11-9ef9-9c60d0ff30d2-config-volume\") pod 
\"collect-profiles-29320755-x7nhh\" (UID: \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.322874 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47df3702-129e-4e11-9ef9-9c60d0ff30d2-secret-volume\") pod \"collect-profiles-29320755-x7nhh\" (UID: \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.335412 4783 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65tmw\" (UniqueName: \"kubernetes.io/projected/47df3702-129e-4e11-9ef9-9c60d0ff30d2-kube-api-access-65tmw\") pod \"collect-profiles-29320755-x7nhh\" (UID: \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.510122 4783 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" Sep 30 15:15:00 crc kubenswrapper[4783]: I0930 15:15:00.941518 4783 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh"] Sep 30 15:15:01 crc kubenswrapper[4783]: I0930 15:15:01.330344 4783 generic.go:334] "Generic (PLEG): container finished" podID="47df3702-129e-4e11-9ef9-9c60d0ff30d2" containerID="28f050c7e91ceb7172f8faf6964387c80096de3da83d7e38a5bad4102cf651cd" exitCode=0 Sep 30 15:15:01 crc kubenswrapper[4783]: I0930 15:15:01.330488 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" event={"ID":"47df3702-129e-4e11-9ef9-9c60d0ff30d2","Type":"ContainerDied","Data":"28f050c7e91ceb7172f8faf6964387c80096de3da83d7e38a5bad4102cf651cd"} Sep 30 15:15:01 crc kubenswrapper[4783]: I0930 15:15:01.330743 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" event={"ID":"47df3702-129e-4e11-9ef9-9c60d0ff30d2","Type":"ContainerStarted","Data":"a2898bf2e3b686c41cbac2eab28236eaed7fb5bb0a3f1321c1f6eb5d64f128f5"} Sep 30 15:15:02 crc kubenswrapper[4783]: I0930 15:15:02.725286 4783 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" Sep 30 15:15:02 crc kubenswrapper[4783]: I0930 15:15:02.854998 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47df3702-129e-4e11-9ef9-9c60d0ff30d2-secret-volume\") pod \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\" (UID: \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\") " Sep 30 15:15:02 crc kubenswrapper[4783]: I0930 15:15:02.855041 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65tmw\" (UniqueName: \"kubernetes.io/projected/47df3702-129e-4e11-9ef9-9c60d0ff30d2-kube-api-access-65tmw\") pod \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\" (UID: \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\") " Sep 30 15:15:02 crc kubenswrapper[4783]: I0930 15:15:02.855220 4783 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47df3702-129e-4e11-9ef9-9c60d0ff30d2-config-volume\") pod \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\" (UID: \"47df3702-129e-4e11-9ef9-9c60d0ff30d2\") " Sep 30 15:15:02 crc kubenswrapper[4783]: I0930 15:15:02.855826 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47df3702-129e-4e11-9ef9-9c60d0ff30d2-config-volume" (OuterVolumeSpecName: "config-volume") pod "47df3702-129e-4e11-9ef9-9c60d0ff30d2" (UID: "47df3702-129e-4e11-9ef9-9c60d0ff30d2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 15:15:02 crc kubenswrapper[4783]: I0930 15:15:02.860742 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47df3702-129e-4e11-9ef9-9c60d0ff30d2-kube-api-access-65tmw" (OuterVolumeSpecName: "kube-api-access-65tmw") pod "47df3702-129e-4e11-9ef9-9c60d0ff30d2" (UID: "47df3702-129e-4e11-9ef9-9c60d0ff30d2"). InnerVolumeSpecName "kube-api-access-65tmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 15:15:02 crc kubenswrapper[4783]: I0930 15:15:02.861148 4783 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47df3702-129e-4e11-9ef9-9c60d0ff30d2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "47df3702-129e-4e11-9ef9-9c60d0ff30d2" (UID: "47df3702-129e-4e11-9ef9-9c60d0ff30d2"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 15:15:02 crc kubenswrapper[4783]: I0930 15:15:02.957825 4783 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47df3702-129e-4e11-9ef9-9c60d0ff30d2-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 15:15:02 crc kubenswrapper[4783]: I0930 15:15:02.957858 4783 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47df3702-129e-4e11-9ef9-9c60d0ff30d2-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 15:15:02 crc kubenswrapper[4783]: I0930 15:15:02.957869 4783 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65tmw\" (UniqueName: \"kubernetes.io/projected/47df3702-129e-4e11-9ef9-9c60d0ff30d2-kube-api-access-65tmw\") on node \"crc\" DevicePath \"\"" Sep 30 15:15:03 crc kubenswrapper[4783]: I0930 15:15:03.350204 4783 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" event={"ID":"47df3702-129e-4e11-9ef9-9c60d0ff30d2","Type":"ContainerDied","Data":"a2898bf2e3b686c41cbac2eab28236eaed7fb5bb0a3f1321c1f6eb5d64f128f5"} Sep 30 15:15:03 crc kubenswrapper[4783]: I0930 15:15:03.350266 4783 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2898bf2e3b686c41cbac2eab28236eaed7fb5bb0a3f1321c1f6eb5d64f128f5" Sep 30 15:15:03 crc kubenswrapper[4783]: I0930 15:15:03.350361 4783 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320755-x7nhh" Sep 30 15:15:03 crc kubenswrapper[4783]: I0930 15:15:03.809622 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p"] Sep 30 15:15:03 crc kubenswrapper[4783]: I0930 15:15:03.815355 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320710-kkv2p"] Sep 30 15:15:04 crc kubenswrapper[4783]: I0930 15:15:04.842782 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:15:04 crc kubenswrapper[4783]: E0930 15:15:04.843338 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:15:04 crc kubenswrapper[4783]: I0930 15:15:04.852878 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c4f99f8-995d-478e-bee7-0fb19f14902c" path="/var/lib/kubelet/pods/8c4f99f8-995d-478e-bee7-0fb19f14902c/volumes" Sep 30 15:15:16 crc kubenswrapper[4783]: I0930 15:15:16.843329 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:15:16 crc kubenswrapper[4783]: E0930 15:15:16.844120 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:15:19 crc kubenswrapper[4783]: I0930 15:15:19.943072 4783 scope.go:117] "RemoveContainer" containerID="2d22f0e1bb9a63dd1d804cb417605f85c551fe9b819969b9efcab6955ae6595f" Sep 30 15:15:28 crc kubenswrapper[4783]: I0930 15:15:28.843039 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:15:28 crc kubenswrapper[4783]: E0930 15:15:28.843747 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:15:41 crc kubenswrapper[4783]: I0930 15:15:41.844039 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:15:41 crc kubenswrapper[4783]: E0930 15:15:41.844878 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:15:46 crc kubenswrapper[4783]: I0930 15:15:46.088824 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-fc8rm"] Sep 30 15:15:46 crc kubenswrapper[4783]: I0930 15:15:46.094942 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-fc8rm"] Sep 30 15:15:46 crc kubenswrapper[4783]: I0930 15:15:46.855037 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa" path="/var/lib/kubelet/pods/3443af67-9eb6-4e48-a3c0-ef1cd3de6eaa/volumes" Sep 30 15:15:56 crc kubenswrapper[4783]: I0930 15:15:56.049049 4783 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-1499-account-create-fd5zk"] Sep 30 15:15:56 crc kubenswrapper[4783]: I0930 15:15:56.058507 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-1499-account-create-fd5zk"] Sep 30 15:15:56 crc kubenswrapper[4783]: I0930 15:15:56.843653 4783 scope.go:117] "RemoveContainer" containerID="04810b4d27adf74c23996751b7d053d041d883998bd5a6188f669d81492deff2" Sep 30 15:15:56 crc kubenswrapper[4783]: E0930 15:15:56.844382 4783 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-668zf_openshift-machine-config-operator(b379cdd3-61d0-47bd-8d9c-4f7809bb75cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-668zf" podUID="b379cdd3-61d0-47bd-8d9c-4f7809bb75cb" Sep 30 15:15:56 crc kubenswrapper[4783]: I0930 15:15:56.856958 4783 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b70c4ecb-466c-40e1-bf6a-dc15b77cbb21" path="/var/lib/kubelet/pods/b70c4ecb-466c-40e1-bf6a-dc15b77cbb21/volumes" Sep 30 15:16:03 crc kubenswrapper[4783]: I0930 15:16:03.048292 4783 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-p7bgs"] Sep 30 15:16:03 crc kubenswrapper[4783]: I0930 15:16:03.065111 4783 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-p7bgs"] var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515066772072024461 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015066772073017377 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015066755624016525 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015066755624015475 5ustar corecore